mirror of https://github.com/Bunsly/JobSpy
fix(indeed): readd param
parent 0d150d519f
commit 3e93454738

@@ -69,7 +69,7 @@ Optional
 ├── proxy (str): in format 'http://user:pass@host:port'
 ├── is_remote (bool)
 ├── results_wanted (int): number of job results to retrieve for each site specified in 'site_type'
-├── easy_apply (bool): filters for jobs that are hosted on the job board site (not supported on Indeed)
+├── easy_apply (bool): filters for jobs that are hosted on the job board site (LinkedIn & Indeed do not allow pairing this with hours_old)
 ├── linkedin_fetch_description (bool): fetches full description for LinkedIn (slower)
 ├── linkedin_company_ids (list[int): searches for linkedin jobs with specific company ids
 ├── description_format (str): markdown, html (format type of the job descriptions)

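The README hunk above re-documents easy_apply as usable on Indeed, with the caveat that LinkedIn & Indeed do not allow combining it with hours_old. A minimal usage sketch, assuming the scrape_jobs entry point and the parameter names documented in the project README (this call is illustrative, not code from this commit):

```python
# Minimal sketch (not part of this commit), assuming the public
# scrape_jobs() entry point and the README parameter names shown above.
from jobspy import scrape_jobs

jobs = scrape_jobs(
    site_name=["indeed"],
    search_term="software engineer",
    results_wanted=20,
    easy_apply=True,  # hours_old left unset: Indeed/LinkedIn reject pairing the two
)
print(jobs.head())
```
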
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "python-jobspy"
-version = "1.1.50"
+version = "1.1.51"
 description = "Job scraper for LinkedIn, Indeed, Glassdoor & ZipRecruiter"
 authors = ["Zachary Hampton <zachary@bunsly.com>", "Cullen Watson <cullen@bunsly.com>"]
 homepage = "https://github.com/Bunsly/JobSpy"

@@ -41,7 +41,7 @@ def scrape_jobs(
 ) -> pd.DataFrame:
     """
     Simultaneously scrapes job data from multiple job sites.
-    :return: results_wanted: pandas dataframe containing job data
+    :return: pandas dataframe containing job data
     """
     SCRAPER_MAPPING = {
         Site.LINKEDIN: LinkedInScraper,

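For context, the SCRAPER_MAPPING visible in the hunk above is a plain dispatch table from a Site enum value to its scraper class. A rough sketch of that pattern (the enum members and class names mirror the diff context; everything else is illustrative, not the project's exact code):

```python
from enum import Enum

class Site(Enum):
    LINKEDIN = "linkedin"
    INDEED = "indeed"

class LinkedInScraper:  # stand-in for the real scraper class
    def scrape(self, scraper_input): ...

class IndeedScraper:  # stand-in for the real scraper class
    def scrape(self, scraper_input): ...

SCRAPER_MAPPING = {
    Site.LINKEDIN: LinkedInScraper,
    Site.INDEED: IndeedScraper,
}

def scrape_site(site: Site, scraper_input):
    # Look up the scraper class for the requested site, instantiate it, and run it.
    scraper_class = SCRAPER_MAPPING[site]
    return scraper_class().scrape(scraper_input)
```
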
@@ -150,6 +150,15 @@ class IndeedScraper(Scraper):
             """.format(
                 start=self.scraper_input.hours_old
             )
+        elif self.scraper_input.easy_apply:
+            filters_str = """
+            filters: {
+                keyword: {
+                  field: "indeedApplyScope",
+                  keys: ["DESKTOP"]
+                }
+            }
+            """
         elif self.scraper_input.job_type or self.scraper_input.is_remote:
             job_type_key_mapping = {
                 JobType.FULL_TIME: "CF3CP",

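The added elif branch builds a GraphQL filter fragment that restricts results to jobs accepting Indeed Apply (i.e. easy apply). A rough sketch of how such a fragment might be spliced into a search query; the query shape and the build_query helper below are assumptions for illustration, not the scraper's actual query:

```python
# Illustrative only: the query structure, field names, and build_query()
# are assumed for this sketch, not JobSpy's exact implementation.
def build_query(search_term: str, filters_str: str) -> str:
    # Double braces are literal GraphQL braces; {what}/{filters} are placeholders.
    return """
    query {{
        jobSearch(
            what: "{what}"
            {filters}
        ) {{
            results {{
                job {{ title }}
            }}
        }}
    }}
    """.format(what=search_term, filters=filters_str)

easy_apply_filters = """
filters: {
    keyword: {
      field: "indeedApplyScope",
      keys: ["DESKTOP"]
    }
}
"""
print(build_query("python developer", easy_apply_filters))
```
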