mirror of https://github.com/Bunsly/JobSpy
Merge pull request #1 from JobSpy-ai/feature/scrapers/Add-Indeed-Scraper
feat: add IndeedScraper
commit 562b46b2a5 (pull/12/head)
.gitignore
@@ -1,2 +1,6 @@
 /.idea
 .DS_Store
+/venv/
+/ven/
+**/__pycache__/
+*.pyc
api/core/jobs/__init__.py
@@ -4,18 +4,19 @@ from enum import Enum


 class JobType(Enum):
-    FULL_TIME = 'full_time'
-    PART_TIME = 'part_time'
-    CONTRACT = 'contract'
-    INTERNSHIP = 'internship'
+    FULL_TIME = "full_time"
+    PART_TIME = "part_time"
+    CONTRACT = "contract"
+    INTERNSHIP = "internship"
+    TEMPORARY = "temporary"


 class CompensationInterval(Enum):
-    ANNUAL = 'annual'
-    MONTHLY = 'monthly'
-    WEEKLY = 'weekly'
-    DAILY = 'daily'
-    HOURLY = 'hourly'
+    YEARLY = "yearly"
+    MONTHLY = "monthly"
+    WEEKLY = "weekly"
+    DAILY = "daily"
+    HOURLY = "hourly"


 class Location(BaseModel):
@@ -30,12 +31,12 @@ class Compensation(BaseModel):
     interval: CompensationInterval
     min_amount: int
     max_amount: int
-    currency: str
+    currency: str = None


 class DeliveryEnum(Enum):
-    EMAIL = 'email'
-    URL = 'url'
+    EMAIL = "email"
+    URL = "url"


 class Delivery(BaseModel):
@@ -45,20 +46,19 @@ class Delivery(BaseModel):


 class JobPost(BaseModel):
     title: str
-    description: str
+    description: str = None
     company_name: str
-    industry: str
+    industry: str = None
     location: Location
     job_type: JobType
-    compensation: Compensation
+    compensation: Compensation = None
     date_posted: datetime
     delivery: Delivery = None


 class JobResponse(BaseModel):
-    jobs: list[JobPost]
-
     job_count: int
-    page: int
+    page: int = 1
     total_pages: int
+
+    jobs: list[JobPost]
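For reference, a minimal sketch of how these models compose after this change. It is not part of the commit; it assumes the module is importable as api.core.jobs (the path the new route's imports use) and that Location accepts the city/state/postal_code/country fields the Indeed scraper passes below. All values are hypothetical.

# Sketch only, not part of the PR: exercising the updated models.
from datetime import datetime

from api.core.jobs import (
    Compensation, CompensationInterval, JobPost, JobResponse, JobType, Location,
)

post = JobPost(
    title="Backend Engineer",  # hypothetical posting
    company_name="Acme Corp",
    location=Location(city="Austin", state="TX", country="US"),
    job_type=JobType.FULL_TIME,
    compensation=Compensation(
        interval=CompensationInterval.YEARLY,  # renamed from ANNUAL in this PR
        min_amount=90000,
        max_amount=120000,
        currency="USD",
    ),
    date_posted=datetime.now(),
)

# description, industry, compensation, and delivery now default to None,
# so sparse postings validate without them.
response = JobResponse(job_count=1, page=1, total_pages=1, jobs=[post])
print(response.json())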
api/core/scrapers/__init__.py
@@ -1,6 +1,6 @@
 from pydantic import BaseModel
 from enum import Enum
-from ..jobs import JobResponse, JobPost
+from ..jobs import JobResponse


 class Site(Enum):
@@ -10,16 +10,15 @@ class Site(Enum):


 class ScraperInput(BaseModel):
-    site: Site
-
     location: str
     search_term: str

-    page: int = None
+    page: int = 1


 class Scraper:  #: to be used as a child class
     def __init__(self, site: Site):
         self.site = site

-    def scrape(self, scraper_input: ScraperInput) -> JobResponse: ...
+    def scrape(self, scraper_input: ScraperInput) -> JobResponse:
+        ...
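The base class above is a bare interface: a subclass binds itself to one Site and overrides scrape. A hedged illustration of that contract (DummyScraper is invented for this note, not part of the PR):

# Illustrative only; DummyScraper does no real scraping.
from api.core.scrapers import Scraper, ScraperInput, Site
from api.core.jobs import JobResponse


class DummyScraper(Scraper):
    def __init__(self):
        super().__init__(Site.INDEED)  # each scraper is tied to one Site

    def scrape(self, scraper_input: ScraperInput) -> JobResponse:
        # A real scraper fetches and parses listings; this one just
        # returns an empty, well-formed response to satisfy the contract.
        return JobResponse(job_count=0, page=scraper_input.page, total_pages=0, jobs=[])


result = DummyScraper().scrape(ScraperInput(search_term="python", location="remote"))
assert result.job_count == 0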
api/core/scrapers/indeed/__init__.py (new file)
@@ -0,0 +1,156 @@
+import json
+import re
+import math
+
+import tls_client
+from bs4 import BeautifulSoup
+
+from .. import Scraper, ScraperInput, Site
+from ...jobs import *
+
+
+class IndeedScraper(Scraper):
+    def __init__(self):
+        site = Site(Site.INDEED)
+        super().__init__(site)
+        self.url = "https://www.indeed.com/jobs"
+
+    def scrape(self, scraper_input: ScraperInput) -> JobResponse:
+        session = tls_client.Session(
+            client_identifier="chrome112", random_tls_extension_order=True
+        )
+
+        params = {
+            "q": scraper_input.search_term,
+            "l": scraper_input.location,
+            "filter": 0,
+            "start": 0 if scraper_input.page is None else (scraper_input.page - 1) * 10,
+        }
+
+        response = session.get(self.url, params=params)
+        if response.status_code != 200:
+            return {"message": f"Error - Status Code: {response.status_code}"}
+
+        soup = BeautifulSoup(response.content, "html.parser")
+
+        jobs = IndeedScraper.parse_jobs(soup)
+        total_num_jobs = IndeedScraper.total_jobs(soup)
+        total_pages = math.ceil(total_num_jobs / 15)  # Indeed lists 15 jobs per page
+
+        job_list: list[JobPost] = []
+        page_number = jobs["metaData"]["mosaicProviderJobCardsModel"]["pageNumber"]
+        for job in jobs["metaData"]["mosaicProviderJobCardsModel"]["results"]:
+            snippet_html = BeautifulSoup(job["snippet"], "html.parser")
+
+            extracted_salary = job.get("extractedSalary")
+            compensation = None
+            if extracted_salary:
+                salary_snippet = job.get("salarySnippet")
+                currency = salary_snippet.get("currency") if salary_snippet else None
+                interval = extracted_salary.get("type")
+                if interval:
+                    interval = interval.upper()
+                if interval in CompensationInterval.__members__:
+                    compensation = Compensation(
+                        interval=CompensationInterval[interval],
+                        min_amount=extracted_salary.get("min"),
+                        max_amount=extracted_salary.get("max"),
+                        currency=currency,
+                    )
+
+            job_type = IndeedScraper.get_job_type(job)
+            if job.get("thirdPartyApplyUrl"):
+                delivery = Delivery(
+                    method=DeliveryEnum.URL, value=job["thirdPartyApplyUrl"]
+                )
+            else:
+                delivery = None
+            timestamp_seconds = job["pubDate"] / 1000  # pubDate is in milliseconds
+            date_posted = datetime.fromtimestamp(timestamp_seconds)
+
+            first_li = snippet_html.find("li")
+            job_post = JobPost(
+                title=job["normTitle"],
+                description=first_li.text if first_li else None,
+                company_name=job["company"],
+                industry=None,
+                location=Location(
+                    city=job["jobLocationCity"],
+                    state=job["jobLocationState"],
+                    postal_code=job.get("jobLocationPostal"),
+                    country="US",
+                ),
+                job_type=job_type,
+                compensation=compensation,
+                date_posted=date_posted,
+                delivery=delivery,
+            )
+            job_list.append(job_post)
+
+        job_response = JobResponse(
+            jobs=job_list,
+            job_count=total_num_jobs,
+            page=page_number,
+            total_pages=total_pages,
+        )
+        return job_response
+
+    @staticmethod
+    def get_job_type(data):
+        for taxonomy in data["taxonomyAttributes"]:
+            if taxonomy["label"] == "job-types":
+                if len(taxonomy["attributes"]) > 0:
+                    job_type_str = (
+                        taxonomy["attributes"][0]["label"]
+                        .replace("-", "_")
+                        .replace(" ", "_")
+                        .upper()
+                    )
+                    return JobType[job_type_str]
+        return None
+
+    @staticmethod
+    def parse_jobs(soup):
+        script_tag = IndeedScraper.find_mosaic_script(soup)
+
+        if script_tag:
+            script_str = script_tag.string
+
+            pattern = r'window.mosaic.providerData\["mosaic-provider-jobcards"\]\s*=\s*({.*?});'
+            p = re.compile(pattern, re.DOTALL)
+
+            m = p.search(script_str)
+            if m:
+                jobs = json.loads(m.group(1).strip())
+                return jobs
+            else:
+                return {"message": "Could not find mosaic provider job cards data"}
+        else:
+            return {
+                "message": "Could not find a script tag containing mosaic provider data"
+            }
+
+    @staticmethod
+    def total_jobs(soup):
+        script = soup.find("script", string=lambda t: t and "window._initialData" in t)
+
+        pattern = re.compile(r"window._initialData\s*=\s*({.*})\s*;", re.DOTALL)
+        match = pattern.search(script.string)
+        total_num_jobs = 0
+        if match:
+            json_str = match.group(1)
+            data = json.loads(json_str)
+            total_num_jobs = data["searchTitleBarModel"]["totalNumResults"]
+        return total_num_jobs
+
+    @staticmethod
+    def find_mosaic_script(soup):
+        script_tags = soup.find_all("script")
+        for script_tag in script_tags:
+            if (
+                script_tag.string
+                and "mosaic.providerData" in script_tag.string
+                and "mosaic-provider-jobcards" in script_tag.string
+            ):
+                return script_tag
+        return None
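A hedged usage sketch for the new scraper. It hits indeed.com live, so it needs network access plus the tls_client and beautifulsoup4 packages the module imports; note that on an HTTP or parse error scrape returns a plain dict with a "message" key rather than a JobResponse, so a careful caller checks for that first.

# Sketch only, not part of the commit; output depends on live Indeed data.
from api.core.scrapers import ScraperInput
from api.core.scrapers.indeed import IndeedScraper

scraper = IndeedScraper()
scraper_input = ScraperInput(search_term="software engineer", location="Dallas, TX", page=1)
result = scraper.scrape(scraper_input)

if isinstance(result, dict):  # error path returns {"message": ...}
    print(result["message"])
else:
    print(f"{result.job_count} jobs, page {result.page} of {result.total_pages}")
    for job in result.jobs[:3]:
        comp = job.compensation
        salary = f"{comp.min_amount}-{comp.max_amount} {comp.interval.value}" if comp else "n/a"
        print(f"{job.title} @ {job.company_name} ({salary})")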
api/routes/jobs.py
@@ -1,3 +1,16 @@
 from fastapi import APIRouter

+from api.core.scrapers.indeed import IndeedScraper
+from api.core.scrapers import ScraperInput
+from api.core.jobs import JobResponse
+
 router = APIRouter(prefix="/jobs")
+
+
+@router.get("/")
+async def scrape_jobs(search_term: str, location: str, page: int = 1):
+    scraper = IndeedScraper()
+
+    scraper_input = ScraperInput(search_term=search_term, location=location, page=page)
+    job_response = scraper.scrape(scraper_input)
+    return job_response
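Assuming the router is mounted on a FastAPI app served at localhost:8000 (neither the app nor the server setup appears in this diff), the endpoint can be exercised like so:

# Hedged example; host and port are assumptions, not part of the PR.
import requests

resp = requests.get(
    "http://localhost:8000/jobs/",
    params={"search_term": "software engineer", "location": "Dallas, TX", "page": 1},
)
resp.raise_for_status()
data = resp.json()
print(data["job_count"], "jobs on page", data["page"], "of", data["total_pages"])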