refactor(jobs): use JobResponse model for bad requests

pull/12/head
Cullen Watson 2023-07-10 18:17:46 -05:00
parent cc48218747
commit a0957f1d5b
4 changed files with 13 additions and 10 deletions

View File

@@ -13,7 +13,6 @@ oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/token")
def create_access_token(data: dict):
print(JWT_SECRET_KEY)
to_encode = data.copy()
expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
to_encode.update({"exp": expire})

View File

@@ -35,9 +35,10 @@ class IndeedScraper(Scraper):
response = session.get(self.url, params=params)
if response.status_code != status.HTTP_200_OK:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Response returned {response.status_code}",
return JobResponse(
success=False,
error=f"Response returned {response.status_code}",
http_response_code=response.status_code
)
soup = BeautifulSoup(response.content, "html.parser")

View File

@@ -26,10 +26,12 @@ class LinkedInScraper(Scraper):
self.url = f"{self.url}/{scraper_input.search_term}-jobs"
response = requests.get(self.url, params=params)
response.status_code = 300
if response.status_code != status.HTTP_200_OK:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Response returned {response.status_code} {response.reason}",
return JobResponse(
success=False,
error=f"Response returned {response.status_code}",
http_response_code=response.status_code
)
soup = BeautifulSoup(response.text, "html.parser")

View File

@@ -34,9 +34,10 @@ class ZipRecruiterScraper(Scraper):
self.url, headers=ZipRecruiterScraper.headers(), params=params
)
if response.status_code != status.HTTP_200_OK:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Response returned {response.status_code} {response.reason}",
return JobResponse(
success=False,
error=f"Response returned {response.status_code}",
http_response_code=response.status_code
)
html_string = response.content