feat(jobs): add distance param

pull/12/head
Cullen Watson 2023-07-10 16:14:05 -05:00
parent 53cbe6ba3f
commit 6f447bcd2c
5 changed files with 14 additions and 3 deletions

View File

@@ -12,6 +12,7 @@ class Site(Enum):
 class ScraperInput(BaseModel):
     location: str
     search_term: str
+    distance: int = 25
     page: int = 1

View File

@@ -26,6 +26,7 @@ class IndeedScraper(Scraper):
             "l": scraper_input.location,
             "filter": 0,
             "start": 0 if scraper_input.page is None else (scraper_input.page - 1) * 10,
+            "radius": scraper_input.distance,
         }
         response = session.get(self.url, params=params)

View File

@@ -16,7 +16,11 @@ class LinkedInScraper(Scraper):
         self.url = "https://www.linkedin.com/jobs"

     def scrape(self, scraper_input: ScraperInput) -> JobResponse:
-        params = {"pageNum": scraper_input.page - 1, "location": scraper_input.location}
+        params = {
+            "pageNum": scraper_input.page - 1,
+            "location": scraper_input.location,
+            "distance": scraper_input.distance,
+        }
         self.url = f"{self.url}/{scraper_input.search_term}-jobs"
         response = requests.get(self.url, params=params)

View File

@@ -25,6 +25,7 @@ class ZipRecruiterScraper(Scraper):
             "search": scraper_input.search_term,
             "location": scraper_input.location,
             "page": min(scraper_input.page, 10),
+            "radius": scraper_input.distance,
         }
         response = session.get(
response = session.get( response = session.get(

View File

@@ -15,11 +15,15 @@ SCRAPER_MAPPING = {
 @router.get("/")
-async def scrape_jobs(site_type: Site, search_term: str, location: str, page: int = 1):
+async def scrape_jobs(
+    site_type: Site, search_term: str, location: str, page: int = 1, distance: int = 25
+):
     scraper_class = SCRAPER_MAPPING[site_type]
     scraper = scraper_class()
-    scraper_input = ScraperInput(search_term=search_term, location=location, page=page)
+    scraper_input = ScraperInput(
+        search_term=search_term, location=location, page=page, distance=distance
+    )
     job_response = scraper.scrape(scraper_input)
     return job_response