adding multiple search sites

pull/12/head
xp15 2023-08-24 22:19:57 -07:00
parent 18dc9a6cf8
commit bca270b2d8
3 changed files with 28 additions and 8 deletions

.vscode/launch.json vendored Normal file

@@ -0,0 +1,16 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Module",
+            "type": "python",
+            "request": "launch",
+            "module": "uvicorn",
+            "args": ["main:app","--reload"]
+        }
+    ]
+}
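Note: this configuration starts the API under the VS Code debugger by running uvicorn main:app --reload, so it assumes a main.py at the workspace root that exposes a FastAPI app object. A minimal sketch of such an entry point follows; the router import path is an assumption, since it is not shown in this commit:

    # main.py -- hypothetical entry point matching the "main:app" target above
    from fastapi import FastAPI
    from api.routes.jobs import router as jobs_router  # assumed module path, not shown in this diff

    app = FastAPI()
    app.include_router(jobs_router)  # the /jobs router changed later in this diff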


@@ -1,4 +1,5 @@
 from ..jobs import *
+from typing import List
 class StatusException(Exception):
@@ -13,7 +14,7 @@ class Site(Enum):
 class ScraperInput(BaseModel):
-    site_type: Site
+    site_type: List[Site]
     search_term: str
     location: str = None
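With site_type now a List[Site], one request can target several job boards at once. A rough construction sketch; the enum member names below are assumptions inferred from the scraper imports elsewhere in this commit:

    # Hypothetical usage of the updated model; Site member names are assumed.
    scraper_input = ScraperInput(
        site_type=[Site.INDEED, Site.LINKEDIN],
        search_term="software engineer",
        location="Dallas, TX",
    )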


@@ -4,6 +4,7 @@ from api.core.scrapers.indeed import IndeedScraper
 from api.core.scrapers.ziprecruiter import ZipRecruiterScraper
 from api.core.scrapers.linkedin import LinkedInScraper
 from api.core.scrapers import ScraperInput, Site, JobResponse
+from typing import List
 router = APIRouter(prefix="/jobs", tags=["jobs"])
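The loop in the next hunk looks each requested site up in SCRAPER_MAPPING, which the hunk header shows maps Site members to the scraper classes imported above. Its full body is not part of this diff; a plausible shape, with member names as assumptions:

    # Presumed shape of SCRAPER_MAPPING (only its closing brace appears in this diff).
    SCRAPER_MAPPING = {
        Site.LINKEDIN: LinkedInScraper,
        Site.INDEED: IndeedScraper,
        Site.ZIP_RECRUITER: ZipRecruiterScraper,
    }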
@@ -14,11 +15,13 @@ SCRAPER_MAPPING = {
 }
-@router.post("/", response_model=JobResponse)
-async def scrape_jobs(scraper_input: ScraperInput):
-    scraper_class = SCRAPER_MAPPING[scraper_input.site_type]
-    scraper = scraper_class()
-    job_response = scraper.scrape(scraper_input)
-    return job_response
+@router.post("/", response_model=List[JobResponse])
+async def scrape_jobs(scraper_input: ScraperInput) -> JobResponse:
+    resp = []
+    for site in scraper_input.site_type:
+        scraper_class = SCRAPER_MAPPING[site]
+        scraper = scraper_class()
+        job_response = scraper.scrape(scraper_input)
+        resp.append(job_response)
+    return resp
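Since the route now declares response_model=List[JobResponse], a client receives one result object per requested site. A hedged example call against a locally running instance; the site string values and port are assumptions:

    # Example request to the updated endpoint; site string values are assumed.
    import requests

    payload = {
        "site_type": ["indeed", "linkedin"],
        "search_term": "software engineer",
        "location": "Dallas, TX",
    }
    results = requests.post("http://localhost:8000/jobs/", json=payload)
    print(results.json())  # a list with one JobResponse-shaped entry per site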