Mirror of https://github.com/Bunsly/JobSpy.git (synced 2026-03-05 03:54:31 -08:00)

Compare commits (12 commits):

| SHA1 |
|---|
| a37e7f235e |
| 690739e858 |
| 43eb2fe0e8 |
| e50227bba6 |
| 45c2d76e15 |
| fd883178be |
| 70e2218c67 |
| d6947ecdd7 |
| 5191658562 |
| 1c264b8c58 |
| 1598d4ff63 |
| bf2460684b |
@@ -2,18 +2,19 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"id": "00a94b47-f47b-420f-ba7e-714ef219c006",
"metadata": {},
"outputs": [],
"source": [
"from jobspy import scrape_jobs\n",
"import pandas as pd"
"import pandas as pd\n",
"from IPython.display import display, HTML\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"id": "9f773e6c-d9fc-42cc-b0ef-63b739e78435",
"metadata": {},
"outputs": [],
@@ -26,643 +27,121 @@
},
{
"cell_type": "code",
"execution_count": null,
"id": "1253c1f8-9437-492e-9dd3-e7fe51099420",
"metadata": {},
"outputs": [],
"source": [
"# example 1 (no hyperlinks, USA)\n",
"result = scrape_jobs(\n",
" site_name=[\"linkedin\", \"zip_recruiter\"],\n",
" location='san francisco',\n",
" search_term=\"engineer\",\n",
" results_wanted=5, \n",
")\n",
"\n",
"display(result.jobs)\n",
"display(result.errors)"
]
},
[removed notebook output: the old execution_count 5 cell carried a 30-row pandas DataFrame of sample job postings from indeed, linkedin, and zip_recruiter with columns site, title, company_name, city, state, job_type, interval, min_amount, max_amount, job_url, description, rendered once as an HTML table and once as its text/plain duplicate, along with the old scrape_jobs call on ["indeed", "linkedin", "zip_recruiter"] with search_term "software engineer" and results_wanted=10 that produced it]
{
"cell_type": "code",
"execution_count": null,
"id": "6a581b2d-f7da-4fac-868d-9efe143ee20a",
"metadata": {},
"outputs": [],
"source": [
"# example 2 - remote USA & hyperlinks\n",
"result = scrape_jobs(\n",
" site_name=[\"linkedin\", \"zip_recruiter\", \"indeed\"],\n",
" # location='san francisco',\n",
" search_term=\"software engineer\",\n",
" country_indeed=\"USA\",\n",
" hyperlinks=True,\n",
" is_remote=True,\n",
" results_wanted=5, \n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fe8289bc-5b64-4202-9a64-7c117c83fd9a",
"metadata": {},
"outputs": [],
"source": [
"# use if hyperlinks=True\n",
"html = result.jobs.to_html(escape=False)\n",
"# change max-width: 200px to show more or less of the content\n",
"truncate_width = f'<style>.dataframe td {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}</style>{html}'\n",
"display(HTML(truncate_width))\n",
"display(result.errors)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "951c2fe1-52ff-407d-8bb1-068049b36777",
"metadata": {},
"outputs": [],
"source": [
"# example 3 - with hyperlinks, international - linkedin (no zip_recruiter)\n",
"result = scrape_jobs(\n",
" site_name=[\"linkedin\"],\n",
" location='berlin',\n",
" search_term=\"engineer\",\n",
" hyperlinks=True,\n",
" results_wanted=5,\n",
" easy_apply=True\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1e37a521-caef-441c-8fc2-2eb5b2e7da62",
"metadata": {},
"outputs": [],
"source": [
"# use if hyperlinks=True\n",
"html = result.jobs.to_html(escape=False)\n",
"# change max-width: 200px to show more or less of the content\n",
"truncate_width = f'<style>.dataframe td {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}</style>{html}'\n",
"display(HTML(truncate_width))\n",
"display(result.errors)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0650e608-0b58-4bf5-ae86-68348035b16a",
"metadata": {},
"outputs": [],
"source": [
"# example 4 - international indeed (no zip_recruiter)\n",
"result = scrape_jobs(\n",
" site_name=[\"indeed\"],\n",
" location='berlin',\n",
" search_term=\"engineer\",\n",
" country_indeed = \"Germany\",\n",
" hyperlinks=True\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "40913ac8-3f8a-4d7e-ac47-afb88316432b",
"metadata": {},
"outputs": [],
"source": [
"# use if hyperlinks=True\n",
"html = result.jobs.to_html(escape=False)\n",
"# change max-width: 200px to show more or less of the content\n",
"truncate_width = f'<style>.dataframe td {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}</style>{html}'\n",
"display(HTML(truncate_width))\n",
"display(result.errors)"
]
}
],
"metadata": {
@@ -681,7 +160,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
"version": "3.10.11"
}
},
"nbformat": 4,
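The three `hyperlinks=True` cells above repeat the same HTML-truncation trick. A minimal standalone sketch of that pattern, assuming only pandas and IPython; the `show_jobs` name and the 200px default are illustrative, not part of the repo:

```python
import pandas as pd
from IPython.display import display, HTML

def show_jobs(jobs: pd.DataFrame, max_width_px: int = 200) -> None:
    """Render a jobs DataFrame with clickable links and truncated cells."""
    # escape=False keeps the <a href=...> anchors produced by hyperlinks=True
    html = jobs.to_html(escape=False)
    style = (
        f"<style>.dataframe td {{ max-width: {max_width_px}px; "
        "overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }</style>"
    )
    display(HTML(style + html))
```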
README.md
@@ -1,15 +1,22 @@
# <img src="https://github.com/cullenwatson/JobSpy/assets/78247585/2f61a059-9647-4a9c-bfb9-e3a9448bdc6a" style="vertical-align: sub; margin-right: 5px;"> JobSpy
<img src="https://github.com/cullenwatson/JobSpy/assets/78247585/ae185b7e-e444-4712-8bb9-fa97f53e896b" width="400">

**JobSpy** is a simple, yet comprehensive, job scraping library.

## Features

- Scrapes job postings from **LinkedIn**, **Indeed** & **ZipRecruiter** simultaneously
- Aggregates the job postings in a Pandas DataFrame

[Video Guide for JobSpy](https://www.youtube.com/watch?v=-yS3mgI5H-4)



### Installation
`pip install python-jobspy`
```
pip install python-jobspy
```

_Python version >= [3.10](https://www.python.org/downloads/release/python-3100/) required_
@@ -19,27 +26,30 @@
from jobspy import scrape_jobs
import pandas as pd

jobs: pd.DataFrame = scrape_jobs(
result: pd.DataFrame = scrape_jobs(
    site_name=["indeed", "linkedin", "zip_recruiter"],
    search_term="software engineer",
    results_wanted=10
    location="Dallas, TX",
    results_wanted=10,

    country_indeed='USA'  # only needed for indeed
)

if jobs.empty:
    print("No jobs found.")
else:
    #1 print
    pd.set_option('display.max_columns', None)
    pd.set_option('display.max_rows', None)
    pd.set_option('display.width', None)
    pd.set_option('display.max_colwidth', 50)  # set to 0 to see full job url / desc
    print(jobs)

#1 output
print(result.jobs)
print(result.errors)

#2 display in Jupyter Notebook
#display(jobs)
#display(result.jobs)
#display(result.errors)

#3 output to .csv
#jobs.to_csv('jobs.csv', index=False)
#result.jobs.to_csv('result.jobs.csv', index=False)
```

### Output

@@ -63,29 +73,67 @@ Optional
├── job_type (enum): fulltime, parttime, internship, contract
├── is_remote (bool)
├── results_wanted (int): number of job results to retrieve for each site specified in 'site_type'
├── easy_apply (bool): filters for jobs on LinkedIn that have the 'Easy Apply' option
├── easy_apply (bool): filters for jobs that are hosted on LinkedIn
├── country_indeed (enum): filters the country on Indeed
```
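For orientation, a minimal sketch that combines the optional parameters above in one call. The search values are illustrative, not taken from this repo's examples; the keyword names follow the schema above and the `scrape_jobs` signature shown later in this diff:

```python
from jobspy import scrape_jobs

# Illustrative values only; every keyword is from the parameter schema above.
result = scrape_jobs(
    site_name=["indeed", "linkedin"],
    search_term="software engineer",
    location="Dallas, TX",
    is_remote=False,
    results_wanted=20,        # per site listed in `site_name`
    easy_apply=False,
    country_indeed="USA",     # only consulted by the Indeed scraper
    hyperlinks=False,
)
print(result.jobs.head())
print(result.errors)
```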

### JobPost Schema
```plaintext
JobPost
├── title (str)
├── company_name (str)
├── company (str)
├── job_url (str)
├── location (object)
│ ├── country (str)
│ ├── city (str)
│ ├── state (str)
├── description (str)
├── job_type (enum)
├── job_type (enum): fulltime, parttime, internship, contract
├── compensation (object)
│ ├── interval (CompensationInterval): yearly, monthly, weekly, daily, hourly
│ ├── min_amount (float)
│ ├── max_amount (float)
│ └── currency (str)
└── date_posted (datetime)
│ ├── interval (enum): yearly, monthly, weekly, daily, hourly
│ ├── min_amount (int)
│ ├── max_amount (int)
│ └── currency (enum)
└── date_posted (date)
```
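As a rough illustration of how a record shaped like this schema ends up as the flat columns (city, state, interval, min_amount, ...) seen in the output examples, a hand-flattened sketch; the dict values are made up:

```python
# A made-up record following the JobPost schema above.
job = {
    "title": "Software Engineer",
    "company_name": "Example Co",
    "job_url": "https://example.com/jobs/123",
    "location": {"country": "usa", "city": "Austin", "state": "TX"},
    "job_type": "fulltime",
    "compensation": {"interval": "yearly", "min_amount": 100000,
                     "max_amount": 140000, "currency": "USD"},
    "date_posted": "2023-09-01",
}

# Flatten the nested location/compensation objects into one row.
row = {
    "title": job["title"],
    "company": job["company_name"],
    "city": job["location"]["city"],
    "state": job["location"]["state"],
    "job_type": job["job_type"],
    "interval": job["compensation"]["interval"],
    "min_amount": job["compensation"]["min_amount"],
    "max_amount": job["compensation"]["max_amount"],
    "job_url": job["job_url"],
}
print(row)
```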

## Supported Countries for Job Searching

### **LinkedIn**

LinkedIn searches globally & uses only the `location` parameter

### **ZipRecruiter**

ZipRecruiter searches for jobs in US/Canada & uses only the `location` parameter

### **Indeed**
For Indeed, the `country_indeed` parameter is required. Additionally, use the `location` parameter and include the city or state if necessary.

You can specify the following countries when searching on Indeed (use the exact name):

| | | | |
|------|------|------|------|
| Argentina | Australia | Austria | Bahrain |
| Belgium | Brazil | Canada | Chile |
| China | Colombia | Costa Rica | Czech Republic |
| Denmark | Ecuador | Egypt | Finland |
| France | Germany | Greece | Hong Kong |
| Hungary | India | Indonesia | Ireland |
| Israel | Italy | Japan | Kuwait |
| Luxembourg | Malaysia | Mexico | Morocco |
| Netherlands | New Zealand | Nigeria | Norway |
| Oman | Pakistan | Panama | Peru |
| Philippines | Poland | Portugal | Qatar |
| Romania | Saudi Arabia | Singapore | South Africa |
| South Korea | Spain | Sweden | Switzerland |
| Taiwan | Thailand | Turkey | Ukraine |
| United Arab Emirates | UK | USA | Uruguay |
| Venezuela | Vietnam | | |

## Frequently Asked Questions
@@ -1,6 +1,6 @@
[tool.poetry]
name = "python-jobspy"
version = "1.0.3"
version = "1.1.2"
description = "Job scraper for LinkedIn, Indeed & ZipRecruiter"
authors = ["Zachary Hampton <zachary@zacharysproducts.com>", "Cullen Watson <cullen@cullen.ai>"]
readme = "README.md"
@@ -1,16 +1,13 @@
import pandas as pd
from typing import List, Tuple
import concurrent.futures
from concurrent.futures import ThreadPoolExecutor
from typing import List, Tuple, NamedTuple, Dict

from .jobs import JobType
from .jobs import JobType, Location
from .scrapers.indeed import IndeedScraper
from .scrapers.ziprecruiter import ZipRecruiterScraper
from .scrapers.linkedin import LinkedInScraper
from .scrapers import (
ScraperInput,
Site,
JobResponse,
)

from .scrapers import ScraperInput, Site, JobResponse, Country

SCRAPER_MAPPING = {
Site.LINKEDIN: LinkedInScraper,
@@ -19,12 +16,17 @@ SCRAPER_MAPPING = {
}


class ScrapeResults(NamedTuple):
jobs: pd.DataFrame
errors: pd.DataFrame


def _map_str_to_site(site_name: str) -> Site:
return Site[site_name.upper()]


def scrape_jobs(
site_name: str | Site | List[Site],
site_name: str | List[str] | Site | List[Site],
search_term: str,
location: str = "",
distance: int = None,
@@ -32,18 +34,24 @@ def scrape_jobs(
job_type: JobType = None,
easy_apply: bool = False,  # linkedin
results_wanted: int = 15,
) -> pd.DataFrame:
country_indeed: str = "usa",
hyperlinks: bool = False
) -> ScrapeResults:
"""
Asynchronously scrapes job data from multiple job sites.
:return: results_wanted: pandas dataframe containing job data
"""

if type(site_name) == str:
site_name = _map_str_to_site(site_name)
site_type = [_map_str_to_site(site_name)]
else: #: if type(site_name) == list
site_type = [_map_str_to_site(site) if type(site) == str else site_name for site in site_name]

country_enum = Country.from_string(country_indeed)

site_type = [site_name] if type(site_name) == Site else site_name
scraper_input = ScraperInput(
site_type=site_type,
country=country_enum,
search_term=search_term,
location=location,
distance=distance,
@@ -54,39 +62,45 @@
)

def scrape_site(site: Site) -> Tuple[str, JobResponse]:
try:
scraper_class = SCRAPER_MAPPING[site]
scraper = scraper_class()
scraped_data: JobResponse = scraper.scrape(scraper_input)

except Exception as e:
scraped_data = JobResponse(jobs=[], error=str(e), success=False)
return site.value, scraped_data

results = {}
for site in scraper_input.site_type:
results, errors = {}, {}

def worker(site):
site_value, scraped_data = scrape_site(site)
return site_value, scraped_data

with ThreadPoolExecutor() as executor:
future_to_site = {executor.submit(worker, site): site for site in scraper_input.site_type}

for future in concurrent.futures.as_completed(future_to_site):
site_value, scraped_data = future.result()
results[site_value] = scraped_data
if scraped_data.error:
errors[site_value] = scraped_data.error

dfs = []

for site, job_response in results.items():
for job in job_response.jobs:
data = job.dict()
data["job_url_hyper"] = f'<a href="{data["job_url"]}">{data["job_url"]}</a>'
data["site"] = site

# Formatting JobType
data["job_type"] = data["job_type"].value if data["job_type"] else None

# Formatting Location
location_obj = data.get("location")
if location_obj and isinstance(location_obj, dict):
data["city"] = location_obj.get("city", "")
data["state"] = location_obj.get("state", "")
data["country"] = location_obj.get("country", "USA")
data["company"] = data["company_name"]
if data["job_type"]:
# Take the first value from the job type tuple
data["job_type"] = data["job_type"].value[0]
else:
data["city"] = None
data["state"] = None
data["country"] = None
data["job_type"] = None

data["location"] = Location(**data["location"]).display_location()

# Formatting Compensation
compensation_obj = data.get("compensation")
if compensation_obj and isinstance(compensation_obj, dict):
data["interval"] = (
@@ -106,18 +120,36 @@ def scrape_jobs(
job_df = pd.DataFrame([data])
dfs.append(job_df)

errors_list = [(key, value) for key, value in errors.items()]
errors_df = pd.DataFrame(errors_list, columns=["Site", "Error"])

if dfs:
df = pd.concat(dfs, ignore_index=True)
if hyperlinks:
desired_order = [
"site",
"title",
"company_name",
"city",
"state",
"company",
"location",
"job_type",
"interval",
"min_amount",
"max_amount",
"currency",
"job_url_hyper",
"description",
]
else:
desired_order = [
"site",
"title",
"company",
"location",
"job_type",
"interval",
"min_amount",
"max_amount",
"currency",
"job_url",
"description",
]
@@ -125,4 +157,4 @@ def scrape_jobs(
else:
df = pd.DataFrame()

return df
return ScrapeResults(jobs=df, errors=errors_df)
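The concurrency pattern introduced above (submit one worker per site, collect results and errors as the futures complete) in a self-contained sketch; `fake_scrape` and its return shape are illustrative stand-ins, not part of the library:

```python
import concurrent.futures
from concurrent.futures import ThreadPoolExecutor

def fake_scrape(site: str) -> tuple[str, str | None]:
    """Stand-in for scraper.scrape(); returns (payload, error)."""
    if site == "linkedin":
        return "", "Response returned 429"
    return f"{site}: 15 jobs", None

sites = ["indeed", "linkedin", "zip_recruiter"]
results, errors = {}, {}

with ThreadPoolExecutor() as executor:
    future_to_site = {executor.submit(fake_scrape, s): s for s in sites}
    for future in concurrent.futures.as_completed(future_to_site):
        site = future_to_site[future]
        payload, error = future.result()
        results[site] = payload
        if error:
            errors[site] = error  # kept separately, like the errors DataFrame above

print(results)
print(errors)
```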
@@ -6,24 +6,160 @@ from pydantic import BaseModel, validator
class JobType(Enum):
FULL_TIME = "fulltime"
PART_TIME = "parttime"
CONTRACT = "contract"
TEMPORARY = "temporary"
INTERNSHIP = "internship"
FULL_TIME = (
"fulltime",
"períodointegral",
"estágio/trainee",
"cunormăîntreagă",
"tiempocompleto",
"vollzeit",
"voltijds",
"tempointegral",
"全职",
"plnýúvazek",
"fuldtid",
"دوامكامل",
"kokopäivätyö",
"tempsplein",
"vollzeit",
"πλήρηςαπασχόληση",
"teljesmunkaidő",
"tempopieno",
"tempsplein",
"heltid",
"jornadacompleta",
"pełnyetat",
"정규직",
"100%",
"全職",
"งานประจำ",
"tamzamanlı",
"повназайнятість",
"toànthờigian",
)
PART_TIME = ("parttime", "teilzeit")
CONTRACT = ("contract", "contractor")
TEMPORARY = ("temporary",)
INTERNSHIP = ("internship", "prácticas", "ojt(onthejobtraining)", "praktikum")

PER_DIEM = "perdiem"
NIGHTS = "nights"
OTHER = "other"
SUMMER = "summer"
VOLUNTEER = "volunteer"
PER_DIEM = ("perdiem",)
NIGHTS = ("nights",)
OTHER = ("other",)
SUMMER = ("summer",)
VOLUNTEER = ("volunteer",)


class Country(Enum):
ARGENTINA = ("argentina", "ar")
AUSTRALIA = ("australia", "au")
AUSTRIA = ("austria", "at")
BAHRAIN = ("bahrain", "bh")
BELGIUM = ("belgium", "be")
BRAZIL = ("brazil", "br")
CANADA = ("canada", "ca")
CHILE = ("chile", "cl")
CHINA = ("china", "cn")
COLOMBIA = ("colombia", "co")
COSTARICA = ("costa rica", "cr")
CZECHREPUBLIC = ("czech republic", "cz")
DENMARK = ("denmark", "dk")
ECUADOR = ("ecuador", "ec")
EGYPT = ("egypt", "eg")
FINLAND = ("finland", "fi")
FRANCE = ("france", "fr")
GERMANY = ("germany", "de")
GREECE = ("greece", "gr")
HONGKONG = ("hong kong", "hk")
HUNGARY = ("hungary", "hu")
INDIA = ("india", "in")
INDONESIA = ("indonesia", "id")
IRELAND = ("ireland", "ie")
ISRAEL = ("israel", "il")
ITALY = ("italy", "it")
JAPAN = ("japan", "jp")
KUWAIT = ("kuwait", "kw")
LUXEMBOURG = ("luxembourg", "lu")
MALAYSIA = ("malaysia", "malaysia")
MEXICO = ("mexico", "mx")
MOROCCO = ("morocco", "ma")
NETHERLANDS = ("netherlands", "nl")
NEWZEALAND = ("new zealand", "nz")
NIGERIA = ("nigeria", "ng")
NORWAY = ("norway", "no")
OMAN = ("oman", "om")
PAKISTAN = ("pakistan", "pk")
PANAMA = ("panama", "pa")
PERU = ("peru", "pe")
PHILIPPINES = ("philippines", "ph")
POLAND = ("poland", "pl")
PORTUGAL = ("portugal", "pt")
QATAR = ("qatar", "qa")
ROMANIA = ("romania", "ro")
SAUDIARABIA = ("saudi arabia", "sa")
SINGAPORE = ("singapore", "sg")
SOUTHAFRICA = ("south africa", "za")
SOUTHKOREA = ("south korea", "kr")
SPAIN = ("spain", "es")
SWEDEN = ("sweden", "se")
SWITZERLAND = ("switzerland", "ch")
TAIWAN = ("taiwan", "tw")
THAILAND = ("thailand", "th")
TURKEY = ("turkey", "tr")
UKRAINE = ("ukraine", "ua")
UNITEDARABEMIRATES = ("united arab emirates", "ae")
UK = ("uk", "uk")
USA = ("usa", "www")
URUGUAY = ("uruguay", "uy")
VENEZUELA = ("venezuela", "ve")
VIETNAM = ("vietnam", "vn")

# internal for ziprecruiter
US_CANADA = ("usa/ca", "www")

# internal for linkedin
WORLDWIDE = ("worldwide", "www")

def __new__(cls, country, domain):
obj = object.__new__(cls)
obj._value_ = country
obj.domain = domain
return obj

@property
def domain_value(self):
return self.domain

@classmethod
def from_string(cls, country_str: str):
"""Convert a string to the corresponding Country enum."""
country_str = country_str.strip().lower()
for country in cls:
if country.value == country_str:
return country
valid_countries = [country.value for country in cls]
raise ValueError(
f"Invalid country string: '{country_str}'. Valid countries (only include this param for Indeed) are: {', '.join(valid_countries)}"
)


class Location(BaseModel):
country: str = "USA"
city: str = None
country: Country = None
city: Optional[str] = None
state: Optional[str] = None

def display_location(self) -> str:
location_parts = []
if self.city:
location_parts.append(self.city)
if self.state:
location_parts.append(self.state)
if self.country and self.country not in (Country.US_CANADA, Country.WORLDWIDE):
if self.country.value in ("usa", "uk"):
location_parts.append(self.country.value.upper())
else:
location_parts.append(self.country.value.title())
return ", ".join(location_parts)


class CompensationInterval(Enum):
YEARLY = "yearly"
@@ -37,7 +173,7 @@ class Compensation(BaseModel):
interval: CompensationInterval
min_amount: int = None
max_amount: int = None
currency: str = "USD"
currency: Optional[str] = "USD"


class JobPost(BaseModel):
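The tuple-valued enums above let one JobType match several site- and language-specific labels. A minimal sketch of that lookup pattern; `MiniJobType` is an illustrative stand-in that mirrors the structure of JobType rather than reproducing it:

```python
from enum import Enum

class MiniJobType(Enum):
    # Each member's value is a tuple of labels that should map to it.
    FULL_TIME = ("fulltime", "vollzeit", "tempsplein")
    CONTRACT = ("contract", "contractor")

def get_enum_from_value(value_str: str) -> MiniJobType | None:
    # Same membership test used by the scrapers' get_enum_from_value helpers.
    for job_type in MiniJobType:
        if value_str in job_type.value:
            return job_type
    return None

print(get_enum_from_value("vollzeit"))    # MiniJobType.FULL_TIME
print(get_enum_from_value("contractor"))  # MiniJobType.CONTRACT
print(get_enum_from_value("unknown"))     # None
```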
@@ -1,4 +1,4 @@
from ..jobs import Enum, BaseModel, JobType, JobResponse
from ..jobs import Enum, BaseModel, JobType, JobResponse, Country
from typing import List, Optional, Any


@@ -18,6 +18,7 @@ class ScraperInput(BaseModel):
search_term: str

location: str = None
country: Optional[Country] = Country.USA
distance: Optional[int] = None
is_remote: bool = False
job_type: Optional[JobType] = None
@@ -26,18 +27,9 @@ class ScraperInput(BaseModel):
results_wanted: int = 15


class CommonResponse(BaseModel):
status: Optional[str]
error: Optional[str]
linkedin: Optional[Any] = None
indeed: Optional[Any] = None
zip_recruiter: Optional[Any] = None


class Scraper:
def __init__(self, site: Site, url: str):
def __init__(self, site: Site):
self.site = site
self.url = url

def scrape(self, scraper_input: ScraperInput) -> JobResponse:
...
@@ -1,6 +1,8 @@
import re
import math
import io
import json
import traceback
from datetime import datetime
from typing import Optional

@@ -18,7 +20,7 @@ from ...jobs import (
JobResponse,
JobType,
)
from .. import Scraper, ScraperInput, Site, StatusException
from .. import Scraper, ScraperInput, Site, Country, StatusException


class ParsingException(Exception):
@@ -31,8 +33,7 @@ class IndeedScraper(Scraper):
Initializes IndeedScraper with the Indeed job search url
"""
site = Site(Site.INDEED)
url = "https://www.indeed.com"
super().__init__(site, url)
super().__init__(site)

self.jobs_per_page = 15
self.seen_urls = set()
@@ -47,16 +48,21 @@ class IndeedScraper(Scraper):
:param session:
:return: jobs found on page, total number of jobs found for search
"""
self.country = scraper_input.country
domain = self.country.domain_value
self.url = f"https://{domain}.indeed.com"

job_list = []

params = {
"q": scraper_input.search_term,
"l": scraper_input.location,
"radius": scraper_input.distance,
"filter": 0,
"start": 0 + page * 10,
}
if scraper_input.distance:
params["radius"] = scraper_input.distance

sc_values = []
if scraper_input.is_remote:
sc_values.append("attr(DSQF7)")
@@ -65,13 +71,14 @@ class IndeedScraper(Scraper):

if sc_values:
params["sc"] = "0kf:" + "".join(sc_values) + ";"
response = session.get(self.url + "/jobs", params=params)
response = session.get(self.url + "/jobs", params=params, allow_redirects=True)
# print(response.status_code)

if response.status_code != 200 and response.status_code != 307:
if response.status_code not in range(200, 400):
raise StatusException(response.status_code)

soup = BeautifulSoup(response.content, "html.parser")
if "did not match any jobs" in str(soup):
if "did not match any jobs" in response.text:
raise ParsingException("Search did not match any jobs")

jobs = IndeedScraper.parse_jobs(
@@ -92,8 +99,6 @@ class IndeedScraper(Scraper):
if job_url in self.seen_urls:
return None

snippet_html = BeautifulSoup(job["snippet"], "html.parser")

extracted_salary = job.get("extractedSalary")
compensation = None
if extracted_salary:
@@ -118,11 +123,12 @@ class IndeedScraper(Scraper):
date_posted = date_posted.strftime("%Y-%m-%d")

description = self.get_description(job_url, session)
li_elements = snippet_html.find_all("li")
with io.StringIO(job["snippet"]) as f:
soup = BeautifulSoup(f, "html.parser")
li_elements = soup.find_all("li")
if description is None and li_elements:
description = " ".join(li.text for li in li_elements)

first_li = snippet_html.find("li")
job_post = JobPost(
title=job["normTitle"],
description=description,
@@ -130,6 +136,7 @@ class IndeedScraper(Scraper):
location=Location(
city=job.get("jobLocationCity"),
state=job.get("jobLocationState"),
country=self.country,
),
job_type=job_type,
compensation=compensation,
@@ -138,7 +145,7 @@ class IndeedScraper(Scraper):
)
return job_post

with ThreadPoolExecutor(max_workers=10) as executor:
with ThreadPoolExecutor(max_workers=1) as executor:
job_results: list[Future] = [
executor.submit(process_job, job)
for job in jobs["metaData"]["mosaicProviderJobCardsModel"]["results"]
@@ -166,7 +173,7 @@ class IndeedScraper(Scraper):
#: get first page to initialize session
job_list, total_results = self.scrape_page(scraper_input, 0, session)

with ThreadPoolExecutor(max_workers=10) as executor:
with ThreadPoolExecutor(max_workers=1) as executor:
futures: list[Future] = [
executor.submit(self.scrape_page, scraper_input, page, session)
for page in range(1, pages_to_process + 1)
@@ -215,7 +222,12 @@ class IndeedScraper(Scraper):
jk_value = params.get("jk", [None])[0]
formatted_url = f"{self.url}/viewjob?jk={jk_value}&spa=1"

response = session.get(formatted_url, allow_redirects=True)
try:
response = session.get(
formatted_url, allow_redirects=True, timeout_seconds=5
)
except requests.exceptions.Timeout:
return None

if response.status_code not in range(200, 400):
return None
@@ -223,7 +235,8 @@ class IndeedScraper(Scraper):
raw_description = response.json()["body"]["jobInfoWrapperModel"][
"jobInfoModel"
]["sanitizedJobDescription"]
soup = BeautifulSoup(raw_description, "html.parser")
with io.StringIO(raw_description) as f:
soup = BeautifulSoup(f, "html.parser")
text_content = " ".join(soup.get_text().split()).strip()
return text_content

@@ -237,13 +250,18 @@ class IndeedScraper(Scraper):
for taxonomy in job["taxonomyAttributes"]:
if taxonomy["label"] == "job-types":
if len(taxonomy["attributes"]) > 0:
job_type_str = (
taxonomy["attributes"][0]["label"]
.replace("-", "_")
.replace(" ", "_")
.upper()
)
return JobType[job_type_str]
label = taxonomy["attributes"][0].get("label")
if label:
job_type_str = label.replace("-", "").replace(" ", "").lower()
# print(f"Debug: job_type_str = {job_type_str}")
return IndeedScraper.get_enum_from_value(job_type_str)
return None

@staticmethod
def get_enum_from_value(value_str):
for job_type in JobType:
if value_str in job_type.value:
return job_type
return None

@staticmethod
@@ -294,7 +312,7 @@ class IndeedScraper(Scraper):
:param soup:
:return: total_num_jobs
"""
script = soup.find("script", string=lambda t: "window._initialData" in t)
script = soup.find("script", string=lambda t: t and "window._initialData" in t)

pattern = re.compile(r"window._initialData\s*=\s*({.*})\s*;", re.DOTALL)
match = pattern.search(script.string)
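A small sketch of the per-country URL construction added above, reusing the (name, domain) tuple idea from the Country enum; `MiniCountry` and `indeed_base_url` are illustrative names, not the library's API:

```python
from enum import Enum

class MiniCountry(Enum):
    USA = ("usa", "www")
    GERMANY = ("germany", "de")
    UK = ("uk", "uk")

    def __new__(cls, country, domain):
        # Store the human-readable name as the value and keep the domain alongside.
        obj = object.__new__(cls)
        obj._value_ = country
        obj.domain = domain
        return obj

def indeed_base_url(country: MiniCountry) -> str:
    # e.g. https://www.indeed.com for USA, https://de.indeed.com for Germany
    return f"https://{country.domain}.indeed.com"

print(indeed_base_url(MiniCountry.USA))      # https://www.indeed.com
print(indeed_base_url(MiniCountry.GERMANY))  # https://de.indeed.com
```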
@@ -1,7 +1,9 @@
|
||||
from typing import Optional, Tuple
|
||||
from datetime import datetime
|
||||
import traceback
|
||||
|
||||
import requests
|
||||
from requests.exceptions import Timeout
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4.element import Tag
|
||||
|
||||
@@ -22,8 +24,8 @@ class LinkedInScraper(Scraper):
|
||||
Initializes LinkedInScraper with the LinkedIn job search url
|
||||
"""
|
||||
site = Site(Site.LINKEDIN)
|
||||
url = "https://www.linkedin.com"
|
||||
super().__init__(site, url)
|
||||
self.url = "https://www.linkedin.com"
|
||||
super().__init__(site)
|
||||
|
||||
def scrape(self, scraper_input: ScraperInput) -> JobResponse:
|
||||
"""
|
||||
@@ -31,6 +33,7 @@ class LinkedInScraper(Scraper):
|
||||
:param scraper_input:
|
||||
:return: job_response
|
||||
"""
|
||||
self.country = "worldwide"
|
||||
job_list: list[JobPost] = []
|
||||
seen_urls = set()
|
||||
page, processed_jobs, job_count = 0, 0, 0
|
||||
@@ -66,9 +69,12 @@ class LinkedInScraper(Scraper):
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
reason = ' (too many requests)' if response.status_code == 429 else ''
|
||||
return JobResponse(
|
||||
success=False,
|
||||
error=f"Response returned {response.status_code}",
|
||||
error=f"LinkedIn returned {response.status_code} {reason}",
|
||||
jobs=job_list,
|
||||
total_results=job_count,
|
||||
)
|
||||
|
||||
soup = BeautifulSoup(response.text, "html.parser")
@@ -104,7 +110,7 @@ class LinkedInScraper(Scraper):
                 metadata_card = job_info.find(
                     "div", class_="base-search-card__metadata"
                 )
-                location: Location = LinkedInScraper.get_location(metadata_card)
+                location: Location = self.get_location(metadata_card)
 
                 datetime_tag = metadata_card.find(
                     "time", class_="job-search-card__listdate"
@@ -112,7 +118,10 @@ class LinkedInScraper(Scraper):
                 description, job_type = LinkedInScraper.get_description(job_url)
                 if datetime_tag:
                     datetime_str = datetime_tag["datetime"]
-                    date_posted = datetime.strptime(datetime_str, "%Y-%m-%d")
+                    try:
+                        date_posted = datetime.strptime(datetime_str, "%Y-%m-%d")
+                    except Exception as e:
+                        date_posted = None
                 else:
                     date_posted = None
@@ -125,19 +134,17 @@ class LinkedInScraper(Scraper):
                     job_url=job_url,
                     job_type=job_type,
                     compensation=Compensation(
-                        interval=CompensationInterval.YEARLY, currency="USD"
+                        interval=CompensationInterval.YEARLY, currency=None
                     ),
                 )
                 job_list.append(job_post)
-                if (
-                    len(job_list) >= scraper_input.results_wanted
-                    or processed_jobs >= job_count
-                ):
-                    break
-            if (
-                len(job_list) >= scraper_input.results_wanted
-                or processed_jobs >= job_count
-            ):
-                break
+                if processed_jobs >= job_count:
+                    break
+                if len(job_list) >= scraper_input.results_wanted:
+                    break
+            if processed_jobs >= job_count:
+                break
+            if len(job_list) >= scraper_input.results_wanted:
+                break
 
             page += 1
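Editor's note: splitting the combined `or` condition into separate checks, repeated at both loop levels, is what lets the scraper leave the inner per-card loop and the outer pagination loop independently for the right reason. A self-contained sketch of the pattern (names and data invented for illustration):

# Illustrative sketch, not part of the diff: each loop level gets its own
# exit checks so the pagination loop also stops once a limit is hit.
def collect(pages, results_wanted, job_count):
    collected, processed = [], 0
    for page in pages:
        for job in page:
            processed += 1
            collected.append(job)
            if processed >= job_count:            # site has nothing more
                break
            if len(collected) >= results_wanted:  # caller has enough
                break
        if processed >= job_count:
            break
        if len(collected) >= results_wanted:
            break
    return collected

print(collect([[1, 2], [3, 4]], results_wanted=3, job_count=10))  # -> [1, 2, 3]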
@@ -157,7 +164,11 @@ class LinkedInScraper(Scraper):
         :param job_page_url:
         :return: description or None
         """
-        response = requests.get(job_page_url, allow_redirects=True)
+        try:
+            response = requests.get(job_page_url, timeout=5)
+        except Timeout:
+            return None, None
 
         if response.status_code not in range(200, 400):
             return None, None
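Editor's note: `timeout=5` plus a `Timeout` handler is the standard requests idiom for bounding a hung connection; without it `requests.get` can block indefinitely on a stalled host. A minimal standalone version of the same guard (the URL is a placeholder):

from typing import Optional, Tuple

import requests
from requests.exceptions import Timeout


def fetch_description(url: str) -> Tuple[Optional[str], Optional[str]]:
    # Bound the connect and read phases; a hung host raises Timeout
    # instead of stalling the scraper forever.
    try:
        response = requests.get(url, timeout=5)
    except Timeout:
        return None, None
    if response.status_code not in range(200, 400):
        return None, None
    return response.text, None


# fetch_description("https://example.com/jobs/view/123")  # placeholder URL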
@@ -195,17 +206,24 @@ class LinkedInScraper(Scraper):
             employment_type = employment_type.lower()
             employment_type = employment_type.replace("-", "")
 
-            return JobType(employment_type)
+            return LinkedInScraper.get_enum_from_value(employment_type)
 
         return text_content, get_job_type(soup)
 
     @staticmethod
-    def get_location(metadata_card: Optional[Tag]) -> Location:
+    def get_enum_from_value(value_str):
+        for job_type in JobType:
+            if value_str in job_type.value:
+                return job_type
+        return None
+
+    def get_location(self, metadata_card: Optional[Tag]) -> Location:
         """
         Extracts the location data from the job metadata card.
         :param metadata_card
         :return: location
         """
+        location = Location(country=self.country)
         if metadata_card is not None:
             location_tag = metadata_card.find(
                 "span", class_="job-search-card__location"
@@ -217,6 +235,7 @@ class LinkedInScraper(Scraper):
                 location = Location(
                     city=city,
                     state=state,
+                    country=self.country,
                 )
 
         return location
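Editor's note: `get_enum_from_value` relies on Python's `in` against each enum member's `value`: with a tuple of synonyms it is a membership test, with a plain string it degrades to a substring test, and either way a miss returns None instead of raising the ValueError that `JobType(...)` would. A standalone sketch with an invented synonym-tuple enum (jobspy's real JobType lives in its models module and may differ):

from enum import Enum
from typing import Optional


class JobType(Enum):  # stand-in; jobspy's real enum may differ
    FULL_TIME = ("fulltime", "fulltime(40+hoursperweek)")
    CONTRACT = ("contract", "contractor")


def get_enum_from_value(value_str: str) -> Optional[JobType]:
    # `in` here is tuple membership; with string values it would be a
    # substring test. Misses return None rather than raising.
    for job_type in JobType:
        if value_str in job_type.value:
            return job_type
    return None


print(get_enum_from_value("contractor"))  # JobType.CONTRACT
print(get_enum_from_value("volunteer"))   # None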
@@ -1,6 +1,7 @@
 import math
 import json
 import re
+import traceback
 from datetime import datetime
 from typing import Optional, Tuple
 from urllib.parse import urlparse, parse_qs
@@ -18,6 +19,7 @@ from ...jobs import (
     Location,
     JobResponse,
     JobType,
+    Country,
 )
 
 
@@ -27,8 +29,8 @@ class ZipRecruiterScraper(Scraper):
         Initializes LinkedInScraper with the ZipRecruiter job search url
         """
         site = Site(Site.ZIP_RECRUITER)
-        url = "https://www.ziprecruiter.com"
-        super().__init__(site, url)
+        self.url = "https://www.ziprecruiter.com"
+        super().__init__(site)
 
         self.jobs_per_page = 20
         self.seen_urls = set()
@@ -80,8 +82,10 @@ class ZipRecruiterScraper(Scraper):
             self.url + "/jobs-search",
             headers=ZipRecruiterScraper.headers(),
             params=params,
             allow_redirects=True,
         )
 
+        # print(response.status_code)
         if response.status_code != 200:
             raise StatusException(response.status_code)
@@ -181,15 +185,12 @@ class ZipRecruiterScraper(Scraper):
         description = job.find("p", {"class": "job_snippet"}).text.strip()
 
         job_type_element = job.find("li", {"class": "perk_item perk_type"})
+        job_type = None
         if job_type_element:
             job_type_text = (
                 job_type_element.text.strip().lower().replace("-", "").replace(" ", "")
             )
-            if job_type_text == "contractor":
-                job_type_text = "contract"
-            job_type = JobType(job_type_text)
-        else:
-            job_type = None
+            job_type = ZipRecruiterScraper.get_job_type_enum(job_type_text)
 
         date_posted = ZipRecruiterScraper.get_date_posted(job)
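Editor's note: the strip/lower/replace chain exists because the perk element renders display text like " Full-Time " while the enum lookup expects compact lowercase tokens; initializing `job_type = None` up front is what makes the old `else` branch redundant. A quick illustration of what the chain produces (sample strings invented):

# Illustrative only: how the perk_item text is normalized before the
# enum lookup.
for raw in (" Full-Time ", "Part - Time", "Contractor"):
    print(repr(raw.strip().lower().replace("-", "").replace(" ", "")))
# 'fulltime'
# 'parttime'
# 'contractor'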
@@ -206,16 +207,17 @@ class ZipRecruiterScraper(Scraper):
         return job_post
 
     def process_job_js(self, job: dict) -> JobPost:
         # Map the job data to the expected fields by the Pydantic model
         title = job.get("Title")
         description = BeautifulSoup(
             job.get("Snippet", "").strip(), "html.parser"
         ).get_text()
 
         company = job.get("OrgName")
-        location = Location(city=job.get("City"), state=job.get("State"))
+        location = Location(
+            city=job.get("City"), state=job.get("State"), country=Country.US_CANADA
+        )
         try:
-            job_type = ZipRecruiterScraper.job_type_from_string(
+            job_type = ZipRecruiterScraper.get_job_type_enum(
                 job.get("EmploymentType", "").replace("-", "_").lower()
             )
         except ValueError:
@@ -244,6 +246,7 @@ class ZipRecruiterScraper(Scraper):
             interval=CompensationInterval.YEARLY,
             min_amount=min_amount,
             max_amount=max_amount,
+            currency="USD/CAD",
         )
         save_job_url = job.get("SaveJobURL", "")
         posted_time_match = re.search(
@@ -270,17 +273,18 @@ class ZipRecruiterScraper(Scraper):
         return job_post
 
     @staticmethod
-    def job_type_from_string(value: str) -> Optional[JobType]:
-        if not value:
-            return None
-
-        if value.lower() == "contractor":
-            value = "contract"
-        normalized_value = value.replace("_", "")
-        for item in JobType:
-            if item.value == normalized_value:
-                return item
-        raise ValueError(f"Invalid value for JobType: {value}")
+    def get_enum_from_value(value_str):
+        for job_type in JobType:
+            if value_str in job_type.value:
+                return job_type
+        return None
+
+    @staticmethod
+    def get_job_type_enum(job_type_str: str) -> Optional[JobType]:
+        for job_type in JobType:
+            if job_type_str in job_type.value:
+                return job_type
+        return None
 
     def get_description(self, job_page_url: str) -> Tuple[Optional[str], Optional[str]]:
         """
@@ -289,11 +293,15 @@ class ZipRecruiterScraper(Scraper):
         :param session:
         :return: description or None, response url
         """
         try:
             response = self.session.get(
-                job_page_url, headers=ZipRecruiterScraper.headers(), allow_redirects=True
+                job_page_url,
+                headers=ZipRecruiterScraper.headers(),
+                allow_redirects=True,
+                timeout_seconds=5,
             )
             if response.status_code not in range(200, 400):
                 return None, None
         except requests.exceptions.Timeout:
             return None
 
         html_string = response.content
         soup_job = BeautifulSoup(html_string, "html.parser")
@@ -375,7 +383,10 @@ class ZipRecruiterScraper(Scraper):
             amounts.append(amount)
 
         compensation = Compensation(
-            interval=interval, min_amount=min(amounts), max_amount=max(amounts)
+            interval=interval,
+            min_amount=min(amounts),
+            max_amount=max(amounts),
+            currency="USD/CAD",
         )
 
         return compensation
@@ -399,10 +410,7 @@ class ZipRecruiterScraper(Scraper):
             city, state = None, None
         else:
             city, state = None, None
-        return Location(
-            city=city,
-            state=state,
-        )
+        return Location(city=city, state=state, country=Country.US_CANADA)
 
     @staticmethod
     def headers() -> dict:
src/tests/test_all.py (new file, 10 lines)
@@ -0,0 +1,10 @@
+from ..jobspy import scrape_jobs
+
+
+def test_all():
+    result = scrape_jobs(
+        site_name=["linkedin", "indeed", "zip_recruiter"],
+        search_term="software engineer",
+        results_wanted=5,
+    )
+    assert result is not None and result.errors.empty is True
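Editor's note: the new smoke test asserts both that a result came back and that its errors table is empty; `result.errors` is presumably the per-site error DataFrame that scrape_jobs attaches, so `.empty` is the pandas emptiness flag. These tests hit the live job boards and can fail for network reasons. One hedged way to run just this suite (the path is assumed from the file layout above):

# Sketch: invoke the suite programmatically; roughly equivalent to
# running `pytest -q src/tests` from the repo root (path assumed).
import pytest

if __name__ == "__main__":
    raise SystemExit(pytest.main(["-q", "src/tests"]))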
@@ -6,4 +6,4 @@ def test_indeed():
         site_name="indeed",
         search_term="software engineer",
     )
-    assert result is not None
+    assert result is not None and result.errors.empty is True
@@ -1,4 +1,4 @@
-from jobspy import scrape_jobs
+from ..jobspy import scrape_jobs
 
 
 def test_linkedin():
@@ -6,4 +6,4 @@ def test_linkedin():
         site_name="linkedin",
         search_term="software engineer",
     )
-    assert result is not None
+    assert result is not None and result.errors.empty is True
@@ -1,4 +1,4 @@
-from jobspy import scrape_jobs
+from ..jobspy import scrape_jobs
 
 
 def test_ziprecruiter():
@@ -7,4 +7,4 @@ def test_ziprecruiter():
         search_term="software engineer",
     )
 
-    assert result is not None
+    assert result is not None and result.errors.empty is True