- api/jobs/v1 schema change

pull/12/head
zacharyhampton 2023-07-10 18:04:44 -05:00 committed by Cullen Watson
parent 28ba4fac8a
commit b41bfd3c1a
2 changed files with 8 additions and 8 deletions

@@ -10,9 +10,12 @@ class Site(Enum):
 class ScraperInput(BaseModel):
-    location: str
+    site_type: Site
     search_term: str
+    location: str
     distance: int = 25
     results_wanted: int = 15 #: TODO: implement
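
For reference, a minimal sketch of constructing the reshaped input model after this change (the enum member Site.INDEED and the field values are assumptions used only for illustration):

    from api.core.scrapers import ScraperInput, Site

    # site_type is now part of the input model itself;
    # Site.INDEED is an assumed member name for illustration.
    scraper_input = ScraperInput(
        site_type=Site.INDEED,
        search_term="software engineer",
        location="Austin, TX",
        distance=25,
    )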

@@ -3,7 +3,7 @@ from fastapi import APIRouter, Depends
 from api.core.scrapers.indeed import IndeedScraper
 from api.core.scrapers.ziprecruiter import ZipRecruiterScraper
 from api.core.scrapers.linkedin import LinkedInScraper
-from api.core.scrapers import ScraperInput, Site
+from api.core.scrapers import ScraperInput, Site, JobResponse

 router = APIRouter(prefix="/jobs")
@@ -14,16 +14,13 @@ SCRAPER_MAPPING = {
 }

-@router.get("/")
+@router.get("/", response_model=JobResponse)
 async def scrape_jobs(
-    site_type: Site, search_term: str, location: str, page: int = 1, distance: int = 25
+    scraper_input: ScraperInput
 ):
-    scraper_class = SCRAPER_MAPPING[site_type]
+    scraper_class = SCRAPER_MAPPING[scraper_input.site_type]
     scraper = scraper_class()
-    scraper_input = ScraperInput(
-        search_term=search_term, location=location, page=page, distance=distance
-    )
     job_response = scraper.scrape(scraper_input)
     return job_response
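
With the route now taking a single ScraperInput and declaring JobResponse as its response model, a call against the endpoint could be sketched as below (the base URL, field values, and enum serialization are assumptions; FastAPI reads the Pydantic model from the request body here, even on a GET route):

    import requests

    # Assumed local dev server; adjust the base URL for your deployment.
    resp = requests.get(
        "http://localhost:8000/jobs/",
        json={
            "site_type": "linkedin",  # must serialize to a valid Site member
            "search_term": "software engineer",
            "location": "Austin, TX",
            "distance": 25,
        },
    )
    resp.raise_for_status()
    job_response = resp.json()  # serialized by the server per JobResponse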