Compare commits
23 Commits
Author | SHA1 | Date |
---|---|---|
|
e378feeefe | |
|
8a5683fe79 | |
|
65f799a27d | |
|
0de916e590 | |
|
6a3f7df087 | |
|
a75bcc2aa0 | |
|
1082b86fa1 | |
|
8e04f6b117 | |
|
1f717bd9e3 | |
|
8cfe056f79 | |
|
1010c743b6 | |
|
32fdc281e3 | |
|
6d14b8df5a | |
|
3f44744d61 | |
|
ac0cad62a7 | |
|
beb885cc8d | |
|
011680f7d8 | |
|
93e6778a48 | |
|
ec036bb989 | |
|
aacd168545 | |
|
0d70007000 | |
|
018d3fbac4 | |
|
803fd618e9 |
|
@ -0,0 +1 @@
|
||||||
|
github: Bunsly
|
72
README.md
72
README.md
|
@ -2,10 +2,6 @@
|
||||||
|
|
||||||
**HomeHarvest** is a real estate scraping library that extracts and formats data in the style of MLS listings.
|
**HomeHarvest** is a real estate scraping library that extracts and formats data in the style of MLS listings.
|
||||||
|
|
||||||
**Not technical?** Try out the web scraping tool on our site at [tryhomeharvest.com](https://tryhomeharvest.com).
|
|
||||||
|
|
||||||
*Looking to build a data-focused software product?* **[Book a call](https://bunsly.com)** *to work with us.*
|
|
||||||
|
|
||||||
## HomeHarvest Features
|
## HomeHarvest Features
|
||||||
|
|
||||||
- **Source**: Fetches properties directly from **Realtor.com**.
|
- **Source**: Fetches properties directly from **Realtor.com**.
|
||||||
|
@ -21,7 +17,7 @@
|
||||||
```bash
|
```bash
|
||||||
pip install -U homeharvest
|
pip install -U homeharvest
|
||||||
```
|
```
|
||||||
_Python version >= [3.10](https://www.python.org/downloads/release/python-3100/) required_
|
_Python version >= [3.9](https://www.python.org/downloads/release/python-3100/) required_
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
|
@ -40,6 +36,7 @@ properties = scrape_property(
|
||||||
listing_type="sold", # or (for_sale, for_rent, pending)
|
listing_type="sold", # or (for_sale, for_rent, pending)
|
||||||
past_days=30, # sold in last 30 days - listed in last 30 days if (for_sale, for_rent)
|
past_days=30, # sold in last 30 days - listed in last 30 days if (for_sale, for_rent)
|
||||||
|
|
||||||
|
# property_type=['single_family','multi_family'],
|
||||||
# date_from="2023-05-01", # alternative to past_days
|
# date_from="2023-05-01", # alternative to past_days
|
||||||
# date_to="2023-05-28",
|
# date_to="2023-05-28",
|
||||||
# foreclosure=True
|
# foreclosure=True
|
||||||
|
@ -68,13 +65,30 @@ print(properties.head())
|
||||||
```
|
```
|
||||||
Required
|
Required
|
||||||
├── location (str): The address in various formats - this could be just a zip code, a full address, or city/state, etc.
|
├── location (str): The address in various formats - this could be just a zip code, a full address, or city/state, etc.
|
||||||
└── listing_type (option): Choose the type of listing.
|
├── listing_type (option): Choose the type of listing.
|
||||||
- 'for_rent'
|
- 'for_rent'
|
||||||
- 'for_sale'
|
- 'for_sale'
|
||||||
- 'sold'
|
- 'sold'
|
||||||
- 'pending'
|
- 'pending' (for pending/contingent sales)
|
||||||
|
|
||||||
Optional
|
Optional
|
||||||
|
├── property_type (list): Choose the type of properties.
|
||||||
|
- 'single_family'
|
||||||
|
- 'multi_family'
|
||||||
|
- 'condos'
|
||||||
|
- 'condo_townhome_rowhome_coop'
|
||||||
|
- 'condo_townhome'
|
||||||
|
- 'townhomes'
|
||||||
|
- 'duplex_triplex'
|
||||||
|
- 'farm'
|
||||||
|
- 'land'
|
||||||
|
- 'mobile'
|
||||||
|
│
|
||||||
|
├── return_type (option): Choose the return type.
|
||||||
|
│ - 'pandas' (default)
|
||||||
|
│ - 'pydantic'
|
||||||
|
│ - 'raw' (json)
|
||||||
|
│
|
||||||
├── radius (decimal): Radius in miles to find comparable properties based on individual addresses.
|
├── radius (decimal): Radius in miles to find comparable properties based on individual addresses.
|
||||||
│ Example: 5.5 (fetches properties within a 5.5-mile radius if location is set to a specific address; otherwise, ignored)
|
│ Example: 5.5 (fetches properties within a 5.5-mile radius if location is set to a specific address; otherwise, ignored)
|
||||||
│
|
│
|
||||||
|
@ -90,9 +104,13 @@ Optional
|
||||||
│
|
│
|
||||||
├── foreclosure (True/False): If set, fetches only foreclosures
|
├── foreclosure (True/False): If set, fetches only foreclosures
|
||||||
│
|
│
|
||||||
└── proxy (string): In format 'http://user:pass@host:port'
|
├── proxy (string): In format 'http://user:pass@host:port'
|
||||||
│
|
│
|
||||||
└── extra_property_data (bool): Increases requests by O(n). If set, this fetches additional property data (e.g. agent, broker, property evaluations etc.)
|
├── extra_property_data (True/False): Increases requests by O(n). If set, this fetches additional property data for general searches (e.g. schools, tax appraisals etc.)
|
||||||
|
│
|
||||||
|
├── exclude_pending (True/False): If set, excludes 'pending' properties from the 'for_sale' results unless listing_type is 'pending'
|
||||||
|
│
|
||||||
|
└── limit (integer): Limit the number of properties to fetch. Max & default is 10000.
|
||||||
```
|
```
|
||||||
|
|
||||||
### Property Schema
|
### Property Schema
|
||||||
|
@ -100,6 +118,8 @@ Optional
|
||||||
Property
|
Property
|
||||||
├── Basic Information:
|
├── Basic Information:
|
||||||
│ ├── property_url
|
│ ├── property_url
|
||||||
|
│ ├── property_id
|
||||||
|
│ ├── listing_id
|
||||||
│ ├── mls
|
│ ├── mls
|
||||||
│ ├── mls_id
|
│ ├── mls_id
|
||||||
│ └── status
|
│ └── status
|
||||||
|
@ -119,34 +139,60 @@ Property
|
||||||
│ ├── sqft
|
│ ├── sqft
|
||||||
│ ├── year_built
|
│ ├── year_built
|
||||||
│ ├── stories
|
│ ├── stories
|
||||||
|
│ ├── garage
|
||||||
│ └── lot_sqft
|
│ └── lot_sqft
|
||||||
|
|
||||||
├── Property Listing Details:
|
├── Property Listing Details:
|
||||||
│ ├── days_on_mls
|
│ ├── days_on_mls
|
||||||
│ ├── list_price
|
│ ├── list_price
|
||||||
|
│ ├── list_price_min
|
||||||
|
│ ├── list_price_max
|
||||||
│ ├── list_date
|
│ ├── list_date
|
||||||
│ ├── pending_date
|
│ ├── pending_date
|
||||||
│ ├── sold_price
|
│ ├── sold_price
|
||||||
│ ├── last_sold_date
|
│ ├── last_sold_date
|
||||||
│ ├── price_per_sqft
|
│ ├── price_per_sqft
|
||||||
│ ├── parking_garage
|
│ ├── new_construction
|
||||||
│ └── hoa_fee
|
│ └── hoa_fee
|
||||||
|
|
||||||
|
├── Tax Information:
|
||||||
|
│ ├── year
|
||||||
|
│ ├── tax
|
||||||
|
│ ├── assessment
|
||||||
|
│ │ ├── building
|
||||||
|
│ │ ├── land
|
||||||
|
│ │ └── total
|
||||||
|
|
||||||
├── Location Details:
|
├── Location Details:
|
||||||
│ ├── latitude
|
│ ├── latitude
|
||||||
│ ├── longitude
|
│ ├── longitude
|
||||||
│ ├── nearby_schools
|
│ ├── nearby_schools
|
||||||
|
|
||||||
|
|
||||||
├── Agent Info:
|
├── Agent Info:
|
||||||
│ ├── agent
|
│ ├── agent_id
|
||||||
|
│ ├── agent_name
|
||||||
│ ├── agent_email
|
│ ├── agent_email
|
||||||
│ └── agent_phone
|
│ └── agent_phone
|
||||||
|
|
||||||
|
├── Broker Info:
|
||||||
|
│ ├── broker_id
|
||||||
|
│ └── broker_name
|
||||||
|
|
||||||
|
├── Builder Info:
|
||||||
|
│ ├── builder_id
|
||||||
|
│ └── builder_name
|
||||||
|
|
||||||
|
├── Office Info:
|
||||||
|
│ ├── office_id
|
||||||
|
│ ├── office_name
|
||||||
|
│ ├── office_phones
|
||||||
|
│ └── office_email
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Exceptions
|
### Exceptions
|
||||||
The following exceptions may be raised when using HomeHarvest:
|
The following exceptions may be raised when using HomeHarvest:
|
||||||
|
|
||||||
- `InvalidListingType` - valid options: `for_sale`, `for_rent`, `sold`
|
- `InvalidListingType` - valid options: `for_sale`, `for_rent`, `sold`, `pending`.
|
||||||
- `InvalidDate` - date_from or date_to is not in the format YYYY-MM-DD.
|
- `InvalidDate` - date_from or date_to is not in the format YYYY-MM-DD.
|
||||||
- `AuthenticationError` - Realtor.com token request failed.
|
- `AuthenticationError` - Realtor.com token request failed.
|
||||||
|
|
|
@ -1,141 +0,0 @@
|
||||||
{
|
|
||||||
"cells": [
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "cb48903e-5021-49fe-9688-45cd0bc05d0f",
|
|
||||||
"metadata": {
|
|
||||||
"is_executing": true
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"from homeharvest import scrape_property\n",
|
|
||||||
"import pandas as pd"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "156488ce-0d5f-43c5-87f4-c33e9c427860",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"pd.set_option('display.max_columns', None) # Show all columns\n",
|
|
||||||
"pd.set_option('display.max_rows', None) # Show all rows\n",
|
|
||||||
"pd.set_option('display.width', None) # Auto-adjust display width to fit console\n",
|
|
||||||
"pd.set_option('display.max_colwidth', 50) # Limit max column width to 50 characters"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "1c8b9744-8606-4e9b-8add-b90371a249a7",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# check for sale properties\n",
|
|
||||||
"scrape_property(\n",
|
|
||||||
" location=\"dallas\",\n",
|
|
||||||
" listing_type=\"for_sale\"\n",
|
|
||||||
")"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "aaf86093",
|
|
||||||
"metadata": {
|
|
||||||
"collapsed": false,
|
|
||||||
"jupyter": {
|
|
||||||
"outputs_hidden": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# search a specific address\n",
|
|
||||||
"scrape_property(\n",
|
|
||||||
" location=\"2530 Al Lipscomb Way\",\n",
|
|
||||||
" listing_type=\"for_sale\"\n",
|
|
||||||
")"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "ab7b4c21-da1d-4713-9df4-d7425d8ce21e",
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# check rentals\n",
|
|
||||||
"scrape_property(\n",
|
|
||||||
" location=\"chicago, illinois\",\n",
|
|
||||||
" listing_type=\"for_rent\"\n",
|
|
||||||
")"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "af280cd3",
|
|
||||||
"metadata": {
|
|
||||||
"collapsed": false,
|
|
||||||
"jupyter": {
|
|
||||||
"outputs_hidden": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# check sold properties\n",
|
|
||||||
"properties = scrape_property(\n",
|
|
||||||
" location=\"90210\",\n",
|
|
||||||
" listing_type=\"sold\",\n",
|
|
||||||
" past_days=10\n",
|
|
||||||
")\n",
|
|
||||||
"display(properties)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": null,
|
|
||||||
"id": "628c1ce2",
|
|
||||||
"metadata": {
|
|
||||||
"collapsed": false,
|
|
||||||
"is_executing": true,
|
|
||||||
"jupyter": {
|
|
||||||
"outputs_hidden": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"# display clickable URLs\n",
|
|
||||||
"from IPython.display import display, HTML\n",
|
|
||||||
"properties['property_url'] = '<a href=\"' + properties['property_url'] + '\" target=\"_blank\">' + properties['property_url'] + '</a>'\n",
|
|
||||||
"\n",
|
|
||||||
"html = properties.to_html(escape=False)\n",
|
|
||||||
"truncate_width = f'<style>.dataframe td {{ max-width: 200px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }}</style>{html}'\n",
|
|
||||||
"display(HTML(truncate_width))"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"metadata": {
|
|
||||||
"kernelspec": {
|
|
||||||
"display_name": "Python 3 (ipykernel)",
|
|
||||||
"language": "python",
|
|
||||||
"name": "python3"
|
|
||||||
},
|
|
||||||
"language_info": {
|
|
||||||
"codemirror_mode": {
|
|
||||||
"name": "ipython",
|
|
||||||
"version": 3
|
|
||||||
},
|
|
||||||
"file_extension": ".py",
|
|
||||||
"mimetype": "text/x-python",
|
|
||||||
"name": "python",
|
|
||||||
"nbconvert_exporter": "python",
|
|
||||||
"pygments_lexer": "ipython3",
|
|
||||||
"version": "3.10.11"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nbformat": 4,
|
|
||||||
"nbformat_minor": 5
|
|
||||||
}
|
|
|
@ -1,20 +0,0 @@
|
||||||
from homeharvest import scrape_property
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
# Generate filename based on current timestamp
|
|
||||||
current_timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
||||||
filename = f"HomeHarvest_{current_timestamp}.csv"
|
|
||||||
|
|
||||||
properties = scrape_property(
|
|
||||||
location="San Diego, CA",
|
|
||||||
listing_type="sold", # or (for_sale, for_rent)
|
|
||||||
past_days=30, # sold in last 30 days - listed in last x days if (for_sale, for_rent)
|
|
||||||
# pending_or_contingent=True # use on for_sale listings to find pending / contingent listings
|
|
||||||
# mls_only=True, # only fetch MLS listings
|
|
||||||
# proxy="http://user:pass@host:port" # use a proxy to change your IP address
|
|
||||||
)
|
|
||||||
print(f"Number of properties: {len(properties)}")
|
|
||||||
|
|
||||||
# Export to csv
|
|
||||||
properties.to_csv(filename, index=False)
|
|
||||||
print(properties.head())
|
|
|
@ -0,0 +1,104 @@
|
||||||
|
"""
|
||||||
|
This script scrapes sold and pending sold land listings in past year for a list of zip codes and saves the data to individual Excel files.
|
||||||
|
It adds two columns to the data: 'lot_acres' and 'ppa' (price per acre) for user to analyze average price of land in a zip code.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import pandas as pd
|
||||||
|
from homeharvest import scrape_property
|
||||||
|
|
||||||
|
|
||||||
|
def get_property_details(zip: str, listing_type):
|
||||||
|
properties = scrape_property(location=zip, listing_type=listing_type, property_type=["land"], past_days=365)
|
||||||
|
if not properties.empty:
|
||||||
|
properties["lot_acres"] = properties["lot_sqft"].apply(lambda x: x / 43560 if pd.notnull(x) else None)
|
||||||
|
|
||||||
|
properties = properties[properties["sqft"].isnull()]
|
||||||
|
properties["ppa"] = properties.apply(
|
||||||
|
lambda row: (
|
||||||
|
int(
|
||||||
|
(
|
||||||
|
row["sold_price"]
|
||||||
|
if (pd.notnull(row["sold_price"]) and row["status"] == "SOLD")
|
||||||
|
else row["list_price"]
|
||||||
|
)
|
||||||
|
/ row["lot_acres"]
|
||||||
|
)
|
||||||
|
if pd.notnull(row["lot_acres"])
|
||||||
|
and row["lot_acres"] > 0
|
||||||
|
and (pd.notnull(row["sold_price"]) or pd.notnull(row["list_price"]))
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
axis=1,
|
||||||
|
)
|
||||||
|
properties["ppa"] = properties["ppa"].astype("Int64")
|
||||||
|
selected_columns = [
|
||||||
|
"property_url",
|
||||||
|
"property_id",
|
||||||
|
"style",
|
||||||
|
"status",
|
||||||
|
"street",
|
||||||
|
"city",
|
||||||
|
"state",
|
||||||
|
"zip_code",
|
||||||
|
"county",
|
||||||
|
"list_date",
|
||||||
|
"last_sold_date",
|
||||||
|
"list_price",
|
||||||
|
"sold_price",
|
||||||
|
"lot_sqft",
|
||||||
|
"lot_acres",
|
||||||
|
"ppa",
|
||||||
|
]
|
||||||
|
properties = properties[selected_columns]
|
||||||
|
return properties
|
||||||
|
|
||||||
|
|
||||||
|
def output_to_excel(zip_code, sold_df, pending_df):
|
||||||
|
root_folder = os.getcwd()
|
||||||
|
zip_folder = os.path.join(root_folder, "zips", zip_code)
|
||||||
|
|
||||||
|
# Create zip code folder if it doesn't exist
|
||||||
|
os.makedirs(zip_folder, exist_ok=True)
|
||||||
|
|
||||||
|
# Define file paths
|
||||||
|
sold_file = os.path.join(zip_folder, f"{zip_code}_sold.xlsx")
|
||||||
|
pending_file = os.path.join(zip_folder, f"{zip_code}_pending.xlsx")
|
||||||
|
|
||||||
|
# Save individual sold and pending files
|
||||||
|
sold_df.to_excel(sold_file, index=False)
|
||||||
|
pending_df.to_excel(pending_file, index=False)
|
||||||
|
|
||||||
|
|
||||||
|
zip_codes = map(
|
||||||
|
str,
|
||||||
|
[
|
||||||
|
22920,
|
||||||
|
77024,
|
||||||
|
78028,
|
||||||
|
24553,
|
||||||
|
22967,
|
||||||
|
22971,
|
||||||
|
22922,
|
||||||
|
22958,
|
||||||
|
22969,
|
||||||
|
22949,
|
||||||
|
22938,
|
||||||
|
24599,
|
||||||
|
24562,
|
||||||
|
22976,
|
||||||
|
24464,
|
||||||
|
22964,
|
||||||
|
24581,
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
combined_df = pd.DataFrame()
|
||||||
|
for zip in zip_codes:
|
||||||
|
sold_df = get_property_details(zip, "sold")
|
||||||
|
pending_df = get_property_details(zip, "pending")
|
||||||
|
combined_df = pd.concat([combined_df, sold_df, pending_df], ignore_index=True)
|
||||||
|
output_to_excel(zip, sold_df, pending_df)
|
||||||
|
|
||||||
|
combined_file = os.path.join(os.getcwd(), "zips", "combined.xlsx")
|
||||||
|
combined_df.to_excel(combined_file, index=False)
|
|
@ -1,14 +1,16 @@
|
||||||
import warnings
|
import warnings
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
from .core.scrapers import ScraperInput
|
from .core.scrapers import ScraperInput
|
||||||
from .utils import process_result, ordered_properties, validate_input, validate_dates
|
from .utils import process_result, ordered_properties, validate_input, validate_dates, validate_limit
|
||||||
from .core.scrapers.realtor import RealtorScraper
|
from .core.scrapers.realtor import RealtorScraper
|
||||||
from .core.scrapers.models import ListingType
|
from .core.scrapers.models import ListingType, SearchPropertyType, ReturnType, Property
|
||||||
|
|
||||||
|
|
||||||
def scrape_property(
|
def scrape_property(
|
||||||
location: str,
|
location: str,
|
||||||
listing_type: str = "for_sale",
|
listing_type: str = "for_sale",
|
||||||
|
return_type: str = "pandas",
|
||||||
|
property_type: list[str] | None = None,
|
||||||
radius: float = None,
|
radius: float = None,
|
||||||
mls_only: bool = False,
|
mls_only: bool = False,
|
||||||
past_days: int = None,
|
past_days: int = None,
|
||||||
|
@ -17,11 +19,15 @@ def scrape_property(
|
||||||
date_to: str = None,
|
date_to: str = None,
|
||||||
foreclosure: bool = None,
|
foreclosure: bool = None,
|
||||||
extra_property_data: bool = True,
|
extra_property_data: bool = True,
|
||||||
) -> pd.DataFrame:
|
exclude_pending: bool = False,
|
||||||
|
limit: int = 10000
|
||||||
|
) -> pd.DataFrame | list[dict] | list[Property]:
|
||||||
"""
|
"""
|
||||||
Scrape properties from Realtor.com based on a given location and listing type.
|
Scrape properties from Realtor.com based on a given location and listing type.
|
||||||
:param location: Location to search (e.g. "Dallas, TX", "85281", "2530 Al Lipscomb Way")
|
:param location: Location to search (e.g. "Dallas, TX", "85281", "2530 Al Lipscomb Way")
|
||||||
:param listing_type: Listing Type (for_sale, for_rent, sold)
|
:param listing_type: Listing Type (for_sale, for_rent, sold, pending)
|
||||||
|
:param return_type: Return type (pandas, pydantic, raw)
|
||||||
|
:param property_type: Property Type (single_family, multi_family, condos, condo_townhome_rowhome_coop, condo_townhome, townhomes, duplex_triplex, farm, land, mobile)
|
||||||
:param radius: Get properties within _ (e.g. 1.0) miles. Only applicable for individual addresses.
|
:param radius: Get properties within _ (e.g. 1.0) miles. Only applicable for individual addresses.
|
||||||
:param mls_only: If set, fetches only listings with MLS IDs.
|
:param mls_only: If set, fetches only listings with MLS IDs.
|
||||||
:param proxy: Proxy to use for scraping
|
:param proxy: Proxy to use for scraping
|
||||||
|
@ -29,13 +35,18 @@ def scrape_property(
|
||||||
:param date_from, date_to: Get properties sold or listed (dependent on your listing_type) between these dates. format: 2021-01-28
|
:param date_from, date_to: Get properties sold or listed (dependent on your listing_type) between these dates. format: 2021-01-28
|
||||||
:param foreclosure: If set, fetches only foreclosure listings.
|
:param foreclosure: If set, fetches only foreclosure listings.
|
||||||
:param extra_property_data: Increases requests by O(n). If set, this fetches additional property data (e.g. agent, broker, property evaluations etc.)
|
:param extra_property_data: Increases requests by O(n). If set, this fetches additional property data (e.g. agent, broker, property evaluations etc.)
|
||||||
|
:param exclude_pending: If true, this excludes pending or contingent properties from the results, unless listing type is pending.
|
||||||
|
:param limit: Limit the number of results returned. Maximum is 10,000.
|
||||||
"""
|
"""
|
||||||
validate_input(listing_type)
|
validate_input(listing_type)
|
||||||
validate_dates(date_from, date_to)
|
validate_dates(date_from, date_to)
|
||||||
|
validate_limit(limit)
|
||||||
|
|
||||||
scraper_input = ScraperInput(
|
scraper_input = ScraperInput(
|
||||||
location=location,
|
location=location,
|
||||||
listing_type=ListingType[listing_type.upper()],
|
listing_type=ListingType(listing_type.upper()),
|
||||||
|
return_type=ReturnType(return_type.lower()),
|
||||||
|
property_type=[SearchPropertyType[prop.upper()] for prop in property_type] if property_type else None,
|
||||||
proxy=proxy,
|
proxy=proxy,
|
||||||
radius=radius,
|
radius=radius,
|
||||||
mls_only=mls_only,
|
mls_only=mls_only,
|
||||||
|
@ -44,16 +55,23 @@ def scrape_property(
|
||||||
date_to=date_to,
|
date_to=date_to,
|
||||||
foreclosure=foreclosure,
|
foreclosure=foreclosure,
|
||||||
extra_property_data=extra_property_data,
|
extra_property_data=extra_property_data,
|
||||||
|
exclude_pending=exclude_pending,
|
||||||
|
limit=limit,
|
||||||
)
|
)
|
||||||
|
|
||||||
site = RealtorScraper(scraper_input)
|
site = RealtorScraper(scraper_input)
|
||||||
results = site.search()
|
results = site.search()
|
||||||
|
|
||||||
properties_dfs = [process_result(result) for result in results]
|
if scraper_input.return_type != ReturnType.pandas:
|
||||||
|
return results
|
||||||
|
|
||||||
|
properties_dfs = [df for result in results if not (df := process_result(result)).empty]
|
||||||
if not properties_dfs:
|
if not properties_dfs:
|
||||||
return pd.DataFrame()
|
return pd.DataFrame()
|
||||||
|
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
warnings.simplefilter("ignore", category=FutureWarning)
|
warnings.simplefilter("ignore", category=FutureWarning)
|
||||||
|
|
||||||
return pd.concat(properties_dfs, ignore_index=True, axis=0)[ordered_properties].replace({"None": "", None: ""})
|
return pd.concat(properties_dfs, ignore_index=True, axis=0)[ordered_properties].replace(
|
||||||
|
{"None": pd.NA, None: pd.NA, "": pd.NA}
|
||||||
|
)
|
||||||
|
|
|
@ -1,16 +1,21 @@
|
||||||
|
from __future__ import annotations
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from requests.adapters import HTTPAdapter
|
from requests.adapters import HTTPAdapter
|
||||||
from urllib3.util.retry import Retry
|
from urllib3.util.retry import Retry
|
||||||
import uuid
|
import uuid
|
||||||
from ...exceptions import AuthenticationError
|
from ...exceptions import AuthenticationError
|
||||||
from .models import Property, ListingType, SiteName
|
from .models import Property, ListingType, SiteName, SearchPropertyType, ReturnType
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class ScraperInput:
|
class ScraperInput:
|
||||||
location: str
|
location: str
|
||||||
listing_type: ListingType
|
listing_type: ListingType
|
||||||
|
property_type: list[SearchPropertyType] | None = None
|
||||||
radius: float | None = None
|
radius: float | None = None
|
||||||
mls_only: bool | None = False
|
mls_only: bool | None = False
|
||||||
proxy: str | None = None
|
proxy: str | None = None
|
||||||
|
@ -19,6 +24,9 @@ class ScraperInput:
|
||||||
date_to: str | None = None
|
date_to: str | None = None
|
||||||
foreclosure: bool | None = False
|
foreclosure: bool | None = False
|
||||||
extra_property_data: bool | None = True
|
extra_property_data: bool | None = True
|
||||||
|
exclude_pending: bool | None = False
|
||||||
|
limit: int = 10000
|
||||||
|
return_type: ReturnType = ReturnType.pandas
|
||||||
|
|
||||||
|
|
||||||
class Scraper:
|
class Scraper:
|
||||||
|
@ -30,11 +38,12 @@ class Scraper:
|
||||||
):
|
):
|
||||||
self.location = scraper_input.location
|
self.location = scraper_input.location
|
||||||
self.listing_type = scraper_input.listing_type
|
self.listing_type = scraper_input.listing_type
|
||||||
|
self.property_type = scraper_input.property_type
|
||||||
|
|
||||||
if not self.session:
|
if not self.session:
|
||||||
Scraper.session = requests.Session()
|
Scraper.session = requests.Session()
|
||||||
retries = Retry(
|
retries = Retry(
|
||||||
total=3, backoff_factor=3, status_forcelist=[429, 403], allowed_methods=frozenset(["GET", "POST"])
|
total=3, backoff_factor=4, status_forcelist=[429, 403], allowed_methods=frozenset(["GET", "POST"])
|
||||||
)
|
)
|
||||||
|
|
||||||
adapter = HTTPAdapter(max_retries=retries)
|
adapter = HTTPAdapter(max_retries=retries)
|
||||||
|
@ -42,8 +51,21 @@ class Scraper:
|
||||||
Scraper.session.mount("https://", adapter)
|
Scraper.session.mount("https://", adapter)
|
||||||
Scraper.session.headers.update(
|
Scraper.session.headers.update(
|
||||||
{
|
{
|
||||||
"auth": f"Bearer {self.get_access_token()}",
|
"accept": "application/json, text/javascript",
|
||||||
"apollographql-client-name": "com.move.Realtor-apollo-ios",
|
"accept-language": "en-US,en;q=0.9",
|
||||||
|
"cache-control": "no-cache",
|
||||||
|
"content-type": "application/json",
|
||||||
|
"origin": "https://www.realtor.com",
|
||||||
|
"pragma": "no-cache",
|
||||||
|
"priority": "u=1, i",
|
||||||
|
"rdc-ab-tests": "commute_travel_time_variation:v1",
|
||||||
|
"sec-ch-ua": '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
|
||||||
|
"sec-ch-ua-mobile": "?0",
|
||||||
|
"sec-ch-ua-platform": '"Windows"',
|
||||||
|
"sec-fetch-dest": "empty",
|
||||||
|
"sec-fetch-mode": "cors",
|
||||||
|
"sec-fetch-site": "same-origin",
|
||||||
|
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -60,8 +82,11 @@ class Scraper:
|
||||||
self.date_to = scraper_input.date_to
|
self.date_to = scraper_input.date_to
|
||||||
self.foreclosure = scraper_input.foreclosure
|
self.foreclosure = scraper_input.foreclosure
|
||||||
self.extra_property_data = scraper_input.extra_property_data
|
self.extra_property_data = scraper_input.extra_property_data
|
||||||
|
self.exclude_pending = scraper_input.exclude_pending
|
||||||
|
self.limit = scraper_input.limit
|
||||||
|
self.return_type = scraper_input.return_type
|
||||||
|
|
||||||
def search(self) -> list[Property]: ...
|
def search(self) -> list[Union[Property | dict]]: ...
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_home(home) -> Property: ...
|
def _parse_home(home) -> Property: ...
|
||||||
|
@ -70,25 +95,34 @@ class Scraper:
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_access_token():
|
def get_access_token():
|
||||||
url = "https://graph.realtor.com/auth/token"
|
device_id = str(uuid.uuid4()).upper()
|
||||||
|
|
||||||
payload = f'{{"client_app_id":"rdc_mobile_native,24.20.4.149916,iphone","device_id":"{str(uuid.uuid4()).upper()}","grant_type":"device_mobile"}}'
|
response = requests.post(
|
||||||
|
"https://graph.realtor.com/auth/token",
|
||||||
headers={
|
headers={
|
||||||
"Host": "graph.realtor.com",
|
"Host": "graph.realtor.com",
|
||||||
"x-client-version": "24.20.4.149916",
|
"Accept": "*/*",
|
||||||
"accept": "*/*",
|
"Content-Type": "Application/json",
|
||||||
"content-type": "Application/json",
|
"X-Client-ID": "rdc_mobile_native,iphone",
|
||||||
"user-agent": "Realtor.com/24.20.4.149916 CFNetwork/1410.0.3 Darwin/22.6.0",
|
"X-Visitor-ID": device_id,
|
||||||
"accept-language": "en-US,en;q=0.9",
|
"X-Client-Version": "24.21.23.679885",
|
||||||
|
"Accept-Language": "en-US,en;q=0.9",
|
||||||
|
"User-Agent": "Realtor.com/24.21.23.679885 CFNetwork/1494.0.7 Darwin/23.4.0",
|
||||||
|
},
|
||||||
|
data=json.dumps(
|
||||||
|
{
|
||||||
|
"grant_type": "device_mobile",
|
||||||
|
"device_id": device_id,
|
||||||
|
"client_app_id": "rdc_mobile_native,24.21.23.679885,iphone",
|
||||||
}
|
}
|
||||||
response = requests.post(url, headers=headers, data=payload)
|
),
|
||||||
|
)
|
||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
|
|
||||||
if not (access_token := data.get("access_token")):
|
if not (access_token := data.get("access_token")):
|
||||||
raise AuthenticationError(
|
raise AuthenticationError(
|
||||||
"Failed to get access token, use a proxy/vpn or wait a moment and try again.",
|
"Failed to get access token, use a proxy/vpn or wait a moment and try again.", response=response
|
||||||
response=response
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return access_token
|
return access_token
|
||||||
|
|
|
@ -1,8 +1,15 @@
|
||||||
|
from __future__ import annotations
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class ReturnType(Enum):
|
||||||
|
pydantic = "pydantic"
|
||||||
|
pandas = "pandas"
|
||||||
|
raw = "raw"
|
||||||
|
|
||||||
|
|
||||||
class SiteName(Enum):
|
class SiteName(Enum):
|
||||||
ZILLOW = "zillow"
|
ZILLOW = "zillow"
|
||||||
REDFIN = "redfin"
|
REDFIN = "redfin"
|
||||||
|
@ -16,6 +23,20 @@ class SiteName(Enum):
|
||||||
raise ValueError(f"{value} not found in {cls}")
|
raise ValueError(f"{value} not found in {cls}")
|
||||||
|
|
||||||
|
|
||||||
|
class SearchPropertyType(Enum):
|
||||||
|
SINGLE_FAMILY = "single_family"
|
||||||
|
APARTMENT = "apartment"
|
||||||
|
CONDOS = "condos"
|
||||||
|
CONDO_TOWNHOME_ROWHOME_COOP = "condo_townhome_rowhome_coop"
|
||||||
|
CONDO_TOWNHOME = "condo_townhome"
|
||||||
|
TOWNHOMES = "townhomes"
|
||||||
|
DUPLEX_TRIPLEX = "duplex_triplex"
|
||||||
|
FARM = "farm"
|
||||||
|
LAND = "land"
|
||||||
|
MULTI_FAMILY = "multi_family"
|
||||||
|
MOBILE = "mobile"
|
||||||
|
|
||||||
|
|
||||||
class ListingType(Enum):
|
class ListingType(Enum):
|
||||||
FOR_SALE = "FOR_SALE"
|
FOR_SALE = "FOR_SALE"
|
||||||
FOR_RENT = "FOR_RENT"
|
FOR_RENT = "FOR_RENT"
|
||||||
|
@ -33,9 +54,12 @@ class PropertyType(Enum):
|
||||||
APARTMENT = "APARTMENT"
|
APARTMENT = "APARTMENT"
|
||||||
BUILDING = "BUILDING"
|
BUILDING = "BUILDING"
|
||||||
COMMERCIAL = "COMMERCIAL"
|
COMMERCIAL = "COMMERCIAL"
|
||||||
|
GOVERNMENT = "GOVERNMENT"
|
||||||
|
INDUSTRIAL = "INDUSTRIAL"
|
||||||
CONDO_TOWNHOME = "CONDO_TOWNHOME"
|
CONDO_TOWNHOME = "CONDO_TOWNHOME"
|
||||||
CONDO_TOWNHOME_ROWHOME_COOP = "CONDO_TOWNHOME_ROWHOME_COOP"
|
CONDO_TOWNHOME_ROWHOME_COOP = "CONDO_TOWNHOME_ROWHOME_COOP"
|
||||||
CONDO = "CONDO"
|
CONDO = "CONDO"
|
||||||
|
CONDOP = "CONDOP"
|
||||||
CONDOS = "CONDOS"
|
CONDOS = "CONDOS"
|
||||||
COOP = "COOP"
|
COOP = "COOP"
|
||||||
DUPLEX_TRIPLEX = "DUPLEX_TRIPLEX"
|
DUPLEX_TRIPLEX = "DUPLEX_TRIPLEX"
|
||||||
|
@ -86,44 +110,85 @@ class AgentPhone: #: For documentation purposes only (at the moment)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Agent:
|
class Entity:
|
||||||
name: str | None = None
|
name: str
|
||||||
|
uuid: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Agent(Entity):
|
||||||
|
mls_set: str | None = None
|
||||||
|
nrds_id: str | None = None
|
||||||
phones: list[dict] | AgentPhone | None = None
|
phones: list[dict] | AgentPhone | None = None
|
||||||
email: str | None = None
|
email: str | None = None
|
||||||
href: str | None = None
|
href: str | None = None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Broker:
|
class Office(Entity):
|
||||||
name: str | None = None
|
mls_set: str | None = None
|
||||||
phone: str | None = None
|
email: str | None = None
|
||||||
website: str | None = None
|
href: str | None = None
|
||||||
|
phones: list[dict] | AgentPhone | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Broker(Entity):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Builder(Entity):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Advertisers:
|
||||||
|
agent: Agent | None = None
|
||||||
|
broker: Broker | None = None
|
||||||
|
builder: Builder | None = None
|
||||||
|
office: Office | None = None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Property:
|
class Property:
|
||||||
property_url: str
|
property_url: str
|
||||||
|
|
||||||
|
property_id: str
|
||||||
|
#: allows_cats: bool
|
||||||
|
#: allows_dogs: bool
|
||||||
|
|
||||||
|
listing_id: str | None = None
|
||||||
|
|
||||||
mls: str | None = None
|
mls: str | None = None
|
||||||
mls_id: str | None = None
|
mls_id: str | None = None
|
||||||
status: str | None = None
|
status: str | None = None
|
||||||
address: Address | None = None
|
address: Address | None = None
|
||||||
|
|
||||||
list_price: int | None = None
|
list_price: int | None = None
|
||||||
|
list_price_min: int | None = None
|
||||||
|
list_price_max: int | None = None
|
||||||
|
|
||||||
list_date: str | None = None
|
list_date: str | None = None
|
||||||
pending_date: str | None = None
|
pending_date: str | None = None
|
||||||
last_sold_date: str | None = None
|
last_sold_date: str | None = None
|
||||||
prc_sqft: int | None = None
|
prc_sqft: int | None = None
|
||||||
|
new_construction: bool | None = None
|
||||||
hoa_fee: int | None = None
|
hoa_fee: int | None = None
|
||||||
days_on_mls: int | None = None
|
days_on_mls: int | None = None
|
||||||
description: Description | None = None
|
description: Description | None = None
|
||||||
|
tags: list[str] | None = None
|
||||||
|
details: list[dict] | None = None
|
||||||
|
|
||||||
latitude: float | None = None
|
latitude: float | None = None
|
||||||
longitude: float | None = None
|
longitude: float | None = None
|
||||||
neighborhoods: Optional[str] = None
|
neighborhoods: Optional[str] = None
|
||||||
county: Optional[str] = None
|
county: Optional[str] = None
|
||||||
fips_code: Optional[str] = None
|
fips_code: Optional[str] = None
|
||||||
agents: list[Agent] | None = None
|
|
||||||
brokers: list[Broker] | None = None
|
|
||||||
nearby_schools: list[str] = None
|
nearby_schools: list[str] = None
|
||||||
assessed_value: int | None = None
|
assessed_value: int | None = None
|
||||||
estimated_value: int | None = None
|
estimated_value: int | None = None
|
||||||
|
tax: int | None = None
|
||||||
|
tax_history: list[dict] | None = None
|
||||||
|
|
||||||
|
advertisers: Advertisers | None = None
|
||||||
|
|
|
@ -5,12 +5,36 @@ homeharvest.realtor.__init__
|
||||||
This module implements the scraper for realtor.com
|
This module implements the scraper for realtor.com
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from json import JSONDecodeError
|
||||||
from typing import Dict, Union, Optional
|
from typing import Dict, Union, Optional
|
||||||
|
|
||||||
|
from tenacity import (
|
||||||
|
retry,
|
||||||
|
retry_if_exception_type,
|
||||||
|
wait_exponential,
|
||||||
|
stop_after_attempt,
|
||||||
|
)
|
||||||
|
|
||||||
from .. import Scraper
|
from .. import Scraper
|
||||||
from ..models import Property, Address, ListingType, Description, PropertyType, Agent, Broker
|
from ..models import (
|
||||||
|
Property,
|
||||||
|
Address,
|
||||||
|
ListingType,
|
||||||
|
Description,
|
||||||
|
PropertyType,
|
||||||
|
Agent,
|
||||||
|
Broker,
|
||||||
|
Builder,
|
||||||
|
Advertisers,
|
||||||
|
Office,
|
||||||
|
ReturnType
|
||||||
|
)
|
||||||
|
from .queries import GENERAL_RESULTS_QUERY, SEARCH_HOMES_DATA, HOMES_DATA, HOME_FRAGMENT
|
||||||
|
|
||||||
|
|
||||||
class RealtorScraper(Scraper):
|
class RealtorScraper(Scraper):
|
||||||
|
@ -19,6 +43,7 @@ class RealtorScraper(Scraper):
|
||||||
PROPERTY_GQL = "https://graph.realtor.com/graphql"
|
PROPERTY_GQL = "https://graph.realtor.com/graphql"
|
||||||
ADDRESS_AUTOCOMPLETE_URL = "https://parser-external.geo.moveaws.com/suggest"
|
ADDRESS_AUTOCOMPLETE_URL = "https://parser-external.geo.moveaws.com/suggest"
|
||||||
NUM_PROPERTY_WORKERS = 20
|
NUM_PROPERTY_WORKERS = 20
|
||||||
|
DEFAULT_PAGE_SIZE = 200
|
||||||
|
|
||||||
def __init__(self, scraper_input):
|
def __init__(self, scraper_input):
|
||||||
super().__init__(scraper_input)
|
super().__init__(scraper_input)
|
||||||
|
@ -44,151 +69,6 @@ class RealtorScraper(Scraper):
|
||||||
|
|
||||||
return result[0]
|
return result[0]
|
||||||
|
|
||||||
def handle_listing(self, listing_id: str) -> list[Property]:
|
|
||||||
query = """query Listing($listing_id: ID!) {
|
|
||||||
listing(id: $listing_id) {
|
|
||||||
source {
|
|
||||||
id
|
|
||||||
listing_id
|
|
||||||
}
|
|
||||||
address {
|
|
||||||
line
|
|
||||||
street_direction
|
|
||||||
street_number
|
|
||||||
street_name
|
|
||||||
street_suffix
|
|
||||||
unit
|
|
||||||
city
|
|
||||||
state_code
|
|
||||||
postal_code
|
|
||||||
location {
|
|
||||||
coordinate {
|
|
||||||
lat
|
|
||||||
lon
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
basic {
|
|
||||||
sqft
|
|
||||||
beds
|
|
||||||
baths_full
|
|
||||||
baths_half
|
|
||||||
lot_sqft
|
|
||||||
sold_price
|
|
||||||
sold_price
|
|
||||||
type
|
|
||||||
price
|
|
||||||
status
|
|
||||||
sold_date
|
|
||||||
list_date
|
|
||||||
}
|
|
||||||
details {
|
|
||||||
year_built
|
|
||||||
stories
|
|
||||||
garage
|
|
||||||
permalink
|
|
||||||
}
|
|
||||||
media {
|
|
||||||
photos {
|
|
||||||
href
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}"""
|
|
||||||
|
|
||||||
variables = {"listing_id": listing_id}
|
|
||||||
payload = {
|
|
||||||
"query": query,
|
|
||||||
"variables": variables,
|
|
||||||
}
|
|
||||||
|
|
||||||
response = self.session.post(self.SEARCH_GQL_URL, json=payload)
|
|
||||||
response_json = response.json()
|
|
||||||
|
|
||||||
property_info = response_json["data"]["listing"]
|
|
||||||
|
|
||||||
mls = (
|
|
||||||
property_info["source"].get("id")
|
|
||||||
if "source" in property_info and isinstance(property_info["source"], dict)
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
|
|
||||||
able_to_get_lat_long = (
|
|
||||||
property_info
|
|
||||||
and property_info.get("address")
|
|
||||||
and property_info["address"].get("location")
|
|
||||||
and property_info["address"]["location"].get("coordinate")
|
|
||||||
)
|
|
||||||
list_date_str = (
|
|
||||||
property_info["basic"]["list_date"].split("T")[0] if property_info["basic"].get("list_date") else None
|
|
||||||
)
|
|
||||||
last_sold_date_str = (
|
|
||||||
property_info["basic"]["sold_date"].split("T")[0] if property_info["basic"].get("sold_date") else None
|
|
||||||
)
|
|
||||||
pending_date_str = property_info["pending_date"].split("T")[0] if property_info.get("pending_date") else None
|
|
||||||
|
|
||||||
list_date = datetime.strptime(list_date_str, "%Y-%m-%d") if list_date_str else None
|
|
||||||
last_sold_date = datetime.strptime(last_sold_date_str, "%Y-%m-%d") if last_sold_date_str else None
|
|
||||||
pending_date = datetime.strptime(pending_date_str, "%Y-%m-%d") if pending_date_str else None
|
|
||||||
today = datetime.now()
|
|
||||||
|
|
||||||
days_on_mls = None
|
|
||||||
status = property_info["basic"]["status"].lower()
|
|
||||||
if list_date:
|
|
||||||
if status == "sold" and last_sold_date:
|
|
||||||
days_on_mls = (last_sold_date - list_date).days
|
|
||||||
elif status in ("for_sale", "for_rent"):
|
|
||||||
days_on_mls = (today - list_date).days
|
|
||||||
if days_on_mls and days_on_mls < 0:
|
|
||||||
days_on_mls = None
|
|
||||||
|
|
||||||
property_id = property_info["details"]["permalink"]
|
|
||||||
prop_details = self.get_prop_details(property_id)
|
|
||||||
listing = Property(
|
|
||||||
mls=mls,
|
|
||||||
mls_id=(
|
|
||||||
property_info["source"].get("listing_id")
|
|
||||||
if "source" in property_info and isinstance(property_info["source"], dict)
|
|
||||||
else None
|
|
||||||
),
|
|
||||||
property_url=f"{self.PROPERTY_URL}{property_id}",
|
|
||||||
status=property_info["basic"]["status"].upper(),
|
|
||||||
list_price=property_info["basic"]["price"],
|
|
||||||
list_date=list_date,
|
|
||||||
prc_sqft=(
|
|
||||||
property_info["basic"].get("price") / property_info["basic"].get("sqft")
|
|
||||||
if property_info["basic"].get("price") and property_info["basic"].get("sqft")
|
|
||||||
else None
|
|
||||||
),
|
|
||||||
last_sold_date=last_sold_date,
|
|
||||||
pending_date=pending_date,
|
|
||||||
latitude=property_info["address"]["location"]["coordinate"].get("lat") if able_to_get_lat_long else None,
|
|
||||||
longitude=property_info["address"]["location"]["coordinate"].get("lon") if able_to_get_lat_long else None,
|
|
||||||
address=self._parse_address(property_info, search_type="handle_listing"),
|
|
||||||
description=Description(
|
|
||||||
alt_photos=self.process_alt_photos(property_info.get("media", {}).get("photos", [])),
|
|
||||||
style=property_info["basic"].get("type", "").upper(),
|
|
||||||
beds=property_info["basic"].get("beds"),
|
|
||||||
baths_full=property_info["basic"].get("baths_full"),
|
|
||||||
baths_half=property_info["basic"].get("baths_half"),
|
|
||||||
sqft=property_info["basic"].get("sqft"),
|
|
||||||
lot_sqft=property_info["basic"].get("lot_sqft"),
|
|
||||||
sold_price=property_info["basic"].get("sold_price"),
|
|
||||||
year_built=property_info["details"].get("year_built"),
|
|
||||||
garage=property_info["details"].get("garage"),
|
|
||||||
stories=property_info["details"].get("stories"),
|
|
||||||
text=property_info.get("description", {}).get("text"),
|
|
||||||
),
|
|
||||||
days_on_mls=days_on_mls,
|
|
||||||
agents=prop_details.get("agents"),
|
|
||||||
brokers=prop_details.get("brokers"),
|
|
||||||
nearby_schools=prop_details.get("schools"),
|
|
||||||
assessed_value=prop_details.get("assessed_value"),
|
|
||||||
estimated_value=prop_details.get("estimated_value"),
|
|
||||||
)
|
|
||||||
|
|
||||||
return [listing]
|
|
||||||
|
|
||||||
def get_latest_listing_id(self, property_id: str) -> str | None:
|
def get_latest_listing_id(self, property_id: str) -> str | None:
|
||||||
query = """query Property($property_id: ID!) {
|
query = """query Property($property_id: ID!) {
|
||||||
property(id: $property_id) {
|
property(id: $property_id) {
|
||||||
|
@ -222,65 +102,15 @@ class RealtorScraper(Scraper):
|
||||||
else:
|
else:
|
||||||
return property_info["listings"][0]["listing_id"]
|
return property_info["listings"][0]["listing_id"]
|
||||||
|
|
||||||
def handle_address(self, property_id: str) -> list[Property]:
|
def handle_home(self, property_id: str) -> list[Property]:
|
||||||
"""
|
query = (
|
||||||
Handles a specific address & returns one property
|
"""query Home($property_id: ID!) {
|
||||||
"""
|
home(property_id: $property_id) %s
|
||||||
query = """query Property($property_id: ID!) {
|
|
||||||
property(id: $property_id) {
|
|
||||||
property_id
|
|
||||||
details {
|
|
||||||
date_updated
|
|
||||||
garage
|
|
||||||
permalink
|
|
||||||
year_built
|
|
||||||
stories
|
|
||||||
}
|
|
||||||
address {
|
|
||||||
line
|
|
||||||
street_direction
|
|
||||||
street_number
|
|
||||||
street_name
|
|
||||||
street_suffix
|
|
||||||
unit
|
|
||||||
city
|
|
||||||
state_code
|
|
||||||
postal_code
|
|
||||||
location {
|
|
||||||
coordinate {
|
|
||||||
lat
|
|
||||||
lon
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
basic {
|
|
||||||
baths
|
|
||||||
beds
|
|
||||||
price
|
|
||||||
sqft
|
|
||||||
lot_sqft
|
|
||||||
type
|
|
||||||
sold_price
|
|
||||||
}
|
|
||||||
public_record {
|
|
||||||
lot_size
|
|
||||||
sqft
|
|
||||||
stories
|
|
||||||
units
|
|
||||||
year_built
|
|
||||||
}
|
|
||||||
primary_photo {
|
|
||||||
href
|
|
||||||
}
|
|
||||||
photos {
|
|
||||||
href
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}"""
|
}"""
|
||||||
|
% HOMES_DATA
|
||||||
|
)
|
||||||
|
|
||||||
variables = {"property_id": property_id}
|
variables = {"property_id": property_id}
|
||||||
prop_details = self.get_prop_details(property_id)
|
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"query": query,
|
"query": query,
|
||||||
"variables": variables,
|
"variables": variables,
|
||||||
|
@ -289,101 +119,123 @@ class RealtorScraper(Scraper):
|
||||||
response = self.session.post(self.SEARCH_GQL_URL, json=payload)
|
response = self.session.post(self.SEARCH_GQL_URL, json=payload)
|
||||||
response_json = response.json()
|
response_json = response.json()
|
||||||
|
|
||||||
property_info = response_json["data"]["property"]
|
property_info = response_json["data"]["home"]
|
||||||
|
|
||||||
return [
|
return [self.process_property(property_info)]
|
||||||
Property(
|
|
||||||
mls_id=property_id,
|
@staticmethod
|
||||||
property_url=f"{self.PROPERTY_URL}{property_info['details']['permalink']}",
|
def process_advertisers(advertisers: list[dict] | None) -> Advertisers | None:
|
||||||
address=self._parse_address(property_info, search_type="handle_address"),
|
if not advertisers:
|
||||||
description=self._parse_description(property_info),
|
return None
|
||||||
agents=prop_details.get("agents"),
|
|
||||||
brokers=prop_details.get("brokers"),
|
def _parse_fulfillment_id(fulfillment_id: str | None) -> str | None:
|
||||||
|
return fulfillment_id if fulfillment_id and fulfillment_id != "0" else None
|
||||||
|
|
||||||
|
processed_advertisers = Advertisers()
|
||||||
|
|
||||||
|
for advertiser in advertisers:
|
||||||
|
advertiser_type = advertiser.get("type")
|
||||||
|
if advertiser_type == "seller": #: agent
|
||||||
|
processed_advertisers.agent = Agent(
|
||||||
|
uuid=_parse_fulfillment_id(advertiser.get("fulfillment_id")),
|
||||||
|
nrds_id=advertiser.get("nrds_id"),
|
||||||
|
mls_set=advertiser.get("mls_set"),
|
||||||
|
name=advertiser.get("name"),
|
||||||
|
email=advertiser.get("email"),
|
||||||
|
phones=advertiser.get("phones"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if advertiser.get("broker") and advertiser["broker"].get("name"): #: has a broker
|
||||||
|
processed_advertisers.broker = Broker(
|
||||||
|
uuid=_parse_fulfillment_id(advertiser["broker"].get("fulfillment_id")),
|
||||||
|
name=advertiser["broker"].get("name"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if advertiser.get("office"): #: has an office
|
||||||
|
processed_advertisers.office = Office(
|
||||||
|
uuid=_parse_fulfillment_id(advertiser["office"].get("fulfillment_id")),
|
||||||
|
mls_set=advertiser["office"].get("mls_set"),
|
||||||
|
name=advertiser["office"].get("name"),
|
||||||
|
email=advertiser["office"].get("email"),
|
||||||
|
phones=advertiser["office"].get("phones"),
|
||||||
|
)
|
||||||
|
|
||||||
|
if advertiser_type == "community": #: could be builder
|
||||||
|
if advertiser.get("builder"):
|
||||||
|
processed_advertisers.builder = Builder(
|
||||||
|
uuid=_parse_fulfillment_id(advertiser["builder"].get("fulfillment_id")),
|
||||||
|
name=advertiser["builder"].get("name"),
|
||||||
|
)
|
||||||
|
|
||||||
|
return processed_advertisers
|
||||||
|
|
||||||
|
def process_property(self, result: dict) -> Property | None:
|
||||||
|
mls = result["source"].get("id") if "source" in result and isinstance(result["source"], dict) else None
|
||||||
|
|
||||||
|
if not mls and self.mls_only:
|
||||||
|
return
|
||||||
|
|
||||||
|
able_to_get_lat_long = (
|
||||||
|
result
|
||||||
|
and result.get("location")
|
||||||
|
and result["location"].get("address")
|
||||||
|
and result["location"]["address"].get("coordinate")
|
||||||
|
)
|
||||||
|
|
||||||
|
is_pending = result["flags"].get("is_pending")
|
||||||
|
is_contingent = result["flags"].get("is_contingent")
|
||||||
|
|
||||||
|
if (is_pending or is_contingent) and (self.exclude_pending and self.listing_type != ListingType.PENDING):
|
||||||
|
return
|
||||||
|
|
||||||
|
property_id = result["property_id"]
|
||||||
|
prop_details = self.process_extra_property_details(result) if self.extra_property_data else {}
|
||||||
|
|
||||||
|
property_estimates_root = result.get("current_estimates") or result.get("estimates", {}).get("currentValues")
|
||||||
|
estimated_value = self.get_key(property_estimates_root, [0, "estimate"])
|
||||||
|
|
||||||
|
advertisers = self.process_advertisers(result.get("advertisers"))
|
||||||
|
|
||||||
|
realty_property = Property(
|
||||||
|
mls=mls,
|
||||||
|
mls_id=(
|
||||||
|
result["source"].get("listing_id")
|
||||||
|
if "source" in result and isinstance(result["source"], dict)
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
property_url=result["href"],
|
||||||
|
property_id=property_id,
|
||||||
|
listing_id=result.get("listing_id"),
|
||||||
|
status=("PENDING" if is_pending else "CONTINGENT" if is_contingent else result["status"].upper()),
|
||||||
|
list_price=result["list_price"],
|
||||||
|
list_price_min=result["list_price_min"],
|
||||||
|
list_price_max=result["list_price_max"],
|
||||||
|
list_date=(result["list_date"].split("T")[0] if result.get("list_date") else None),
|
||||||
|
prc_sqft=result.get("price_per_sqft"),
|
||||||
|
last_sold_date=result.get("last_sold_date"),
|
||||||
|
new_construction=result["flags"].get("is_new_construction") is True,
|
||||||
|
hoa_fee=(result["hoa"]["fee"] if result.get("hoa") and isinstance(result["hoa"], dict) else None),
|
||||||
|
latitude=(result["location"]["address"]["coordinate"].get("lat") if able_to_get_lat_long else None),
|
||||||
|
longitude=(result["location"]["address"]["coordinate"].get("lon") if able_to_get_lat_long else None),
|
||||||
|
address=self._parse_address(result, search_type="general_search"),
|
||||||
|
description=self._parse_description(result),
|
||||||
|
neighborhoods=self._parse_neighborhoods(result),
|
||||||
|
county=(result["location"]["county"].get("name") if result["location"]["county"] else None),
|
||||||
|
fips_code=(result["location"]["county"].get("fips_code") if result["location"]["county"] else None),
|
||||||
|
days_on_mls=self.calculate_days_on_mls(result),
|
||||||
nearby_schools=prop_details.get("schools"),
|
nearby_schools=prop_details.get("schools"),
|
||||||
assessed_value=prop_details.get("assessed_value"),
|
assessed_value=prop_details.get("assessed_value"),
|
||||||
estimated_value=prop_details.get("estimated_value"),
|
estimated_value=estimated_value if estimated_value else None,
|
||||||
|
advertisers=advertisers,
|
||||||
|
tax=prop_details.get("tax"),
|
||||||
|
tax_history=prop_details.get("tax_history"),
|
||||||
)
|
)
|
||||||
]
|
return realty_property
|
||||||
|
|
||||||
def general_search(self, variables: dict, search_type: str) -> Dict[str, Union[int, list[Property]]]:
|
def general_search(self, variables: dict, search_type: str) -> Dict[str, Union[int, Union[list[Property], list[dict]]]]:
|
||||||
"""
|
"""
|
||||||
Handles a location area & returns a list of properties
|
Handles a location area & returns a list of properties
|
||||||
"""
|
"""
|
||||||
results_query = """{
|
|
||||||
count
|
|
||||||
total
|
|
||||||
results {
|
|
||||||
pending_date
|
|
||||||
property_id
|
|
||||||
list_date
|
|
||||||
status
|
|
||||||
last_sold_price
|
|
||||||
last_sold_date
|
|
||||||
list_price
|
|
||||||
price_per_sqft
|
|
||||||
flags {
|
|
||||||
is_contingent
|
|
||||||
is_pending
|
|
||||||
}
|
|
||||||
description {
|
|
||||||
type
|
|
||||||
sqft
|
|
||||||
beds
|
|
||||||
baths_full
|
|
||||||
baths_half
|
|
||||||
lot_sqft
|
|
||||||
sold_price
|
|
||||||
year_built
|
|
||||||
garage
|
|
||||||
sold_price
|
|
||||||
type
|
|
||||||
name
|
|
||||||
stories
|
|
||||||
text
|
|
||||||
}
|
|
||||||
source {
|
|
||||||
id
|
|
||||||
listing_id
|
|
||||||
}
|
|
||||||
hoa {
|
|
||||||
fee
|
|
||||||
}
|
|
||||||
location {
|
|
||||||
address {
|
|
||||||
street_direction
|
|
||||||
street_number
|
|
||||||
street_name
|
|
||||||
street_suffix
|
|
||||||
line
|
|
||||||
unit
|
|
||||||
city
|
|
||||||
state_code
|
|
||||||
postal_code
|
|
||||||
coordinate {
|
|
||||||
lon
|
|
||||||
lat
|
|
||||||
}
|
|
||||||
}
|
|
||||||
county {
|
|
||||||
name
|
|
||||||
fips_code
|
|
||||||
}
|
|
||||||
neighborhoods {
|
|
||||||
name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
tax_record {
|
|
||||||
public_record_id
|
|
||||||
}
|
|
||||||
primary_photo {
|
|
||||||
href
|
|
||||||
}
|
|
||||||
photos {
|
|
||||||
href
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}"""
|
|
||||||
|
|
||||||
date_param = ""
|
date_param = ""
|
||||||
if self.listing_type == ListingType.SOLD:
|
if self.listing_type == ListingType.SOLD:
|
||||||
|
@ -397,10 +249,15 @@ class RealtorScraper(Scraper):
|
||||||
elif self.last_x_days:
|
elif self.last_x_days:
|
||||||
date_param = f'list_date: {{ min: "$today-{self.last_x_days}D" }}'
|
date_param = f'list_date: {{ min: "$today-{self.last_x_days}D" }}'
|
||||||
|
|
||||||
|
property_type_param = ""
|
||||||
|
if self.property_type:
|
||||||
|
property_types = [pt.value for pt in self.property_type]
|
||||||
|
property_type_param = f"type: {json.dumps(property_types)}"
|
||||||
|
|
||||||
sort_param = (
|
sort_param = (
|
||||||
"sort: [{ field: sold_date, direction: desc }]"
|
"sort: [{ field: sold_date, direction: desc }]"
|
||||||
if self.listing_type == ListingType.SOLD
|
if self.listing_type == ListingType.SOLD
|
||||||
else "sort: [{ field: list_date, direction: desc }]"
|
else "" #: "sort: [{ field: list_date, direction: desc }]" #: prioritize normal fractal sort from realtor
|
||||||
)
|
)
|
||||||
|
|
||||||
pending_or_contingent_param = (
|
pending_or_contingent_param = (
|
||||||
|
@ -431,47 +288,49 @@ class RealtorScraper(Scraper):
|
||||||
status: %s
|
status: %s
|
||||||
%s
|
%s
|
||||||
%s
|
%s
|
||||||
|
%s
|
||||||
}
|
}
|
||||||
%s
|
%s
|
||||||
limit: 200
|
limit: 200
|
||||||
offset: $offset
|
offset: $offset
|
||||||
) %s""" % (
|
) %s
|
||||||
|
}""" % (
|
||||||
is_foreclosure,
|
is_foreclosure,
|
||||||
listing_type.value.lower(),
|
listing_type.value.lower(),
|
||||||
date_param,
|
date_param,
|
||||||
|
property_type_param,
|
||||||
pending_or_contingent_param,
|
pending_or_contingent_param,
|
||||||
sort_param,
|
sort_param,
|
||||||
results_query,
|
GENERAL_RESULTS_QUERY,
|
||||||
)
|
)
|
||||||
elif search_type == "area": #: general search, came from a general location
|
elif search_type == "area": #: general search, came from a general location
|
||||||
query = """query Home_search(
|
query = """query Home_search(
|
||||||
$city: String,
|
$location: String!,
|
||||||
$county: [String],
|
|
||||||
$state_code: String,
|
|
||||||
$postal_code: String
|
|
||||||
$offset: Int,
|
$offset: Int,
|
||||||
) {
|
) {
|
||||||
home_search(
|
home_search(
|
||||||
query: {
|
query: {
|
||||||
%s
|
%s
|
||||||
city: $city
|
search_location: {location: $location}
|
||||||
county: $county
|
|
||||||
postal_code: $postal_code
|
|
||||||
state_code: $state_code
|
|
||||||
status: %s
|
status: %s
|
||||||
|
unique: true
|
||||||
|
%s
|
||||||
%s
|
%s
|
||||||
%s
|
%s
|
||||||
}
|
}
|
||||||
|
bucket: { sort: "fractal_v1.1.3_fr" }
|
||||||
%s
|
%s
|
||||||
limit: 200
|
limit: 200
|
||||||
offset: $offset
|
offset: $offset
|
||||||
) %s""" % (
|
) %s
|
||||||
|
}""" % (
|
||||||
is_foreclosure,
|
is_foreclosure,
|
||||||
listing_type.value.lower(),
|
listing_type.value.lower(),
|
||||||
date_param,
|
date_param,
|
||||||
|
property_type_param,
|
||||||
pending_or_contingent_param,
|
pending_or_contingent_param,
|
||||||
sort_param,
|
sort_param,
|
||||||
results_query,
|
GENERAL_RESULTS_QUERY,
|
||||||
)
|
)
|
||||||
else: #: general search, came from an address
|
else: #: general search, came from an address
|
||||||
query = (
|
query = (
|
||||||
|
@ -479,14 +338,15 @@ class RealtorScraper(Scraper):
|
||||||
$property_id: [ID]!
|
$property_id: [ID]!
|
||||||
$offset: Int!,
|
$offset: Int!,
|
||||||
) {
|
) {
|
||||||
property_search(
|
home_search(
|
||||||
query: {
|
query: {
|
||||||
property_id: $property_id
|
property_id: $property_id
|
||||||
}
|
}
|
||||||
limit: 1
|
limit: 1
|
||||||
offset: $offset
|
offset: $offset
|
||||||
) %s"""
|
) %s
|
||||||
% results_query
|
}"""
|
||||||
|
% GENERAL_RESULTS_QUERY
|
||||||
)
|
)
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
|
@@ -498,7 +358,7 @@ class RealtorScraper(Scraper):
         response_json = response.json()
         search_key = "home_search" if "home_search" in query else "property_search"

-        properties: list[Property] = []
+        properties: list[Union[Property, dict]] = []

         if (
             response_json is None
@@ -510,73 +370,34 @@ class RealtorScraper(Scraper):
         ):
             return {"total": 0, "properties": []}

-        def process_property(result: dict) -> Property | None:
-            mls = result["source"].get("id") if "source" in result and isinstance(result["source"], dict) else None
-
-            if not mls and self.mls_only:
-                return
-
-            able_to_get_lat_long = (
-                result
-                and result.get("location")
-                and result["location"].get("address")
-                and result["location"]["address"].get("coordinate")
-            )
-
-            is_pending = result["flags"].get("is_pending") or result["flags"].get("is_contingent")
-
-            if is_pending and self.listing_type != ListingType.PENDING:
-                return
-
-            property_id = result["property_id"]
-            prop_details = self.get_prop_details(property_id)
-
-            realty_property = Property(
-                mls=mls,
-                mls_id=(
-                    result["source"].get("listing_id")
-                    if "source" in result and isinstance(result["source"], dict)
-                    else None
-                ),
-                property_url=(
-                    f"{self.PROPERTY_URL}{property_id}"
-                    if self.listing_type != ListingType.FOR_RENT
-                    else f"{self.PROPERTY_URL}M{property_id}?listing_status=rental"
-                ),
-                status="PENDING" if is_pending else result["status"].upper(),
-                list_price=result["list_price"],
-                list_date=result["list_date"].split("T")[0] if result.get("list_date") else None,
-                prc_sqft=result.get("price_per_sqft"),
-                last_sold_date=result.get("last_sold_date"),
-                hoa_fee=result["hoa"]["fee"] if result.get("hoa") and isinstance(result["hoa"], dict) else None,
-                latitude=result["location"]["address"]["coordinate"].get("lat") if able_to_get_lat_long else None,
-                longitude=result["location"]["address"]["coordinate"].get("lon") if able_to_get_lat_long else None,
-                address=self._parse_address(result, search_type="general_search"),
-                description=self._parse_description(result),
-                neighborhoods=self._parse_neighborhoods(result),
-                county=result["location"]["county"].get("name") if result["location"]["county"] else None,
-                fips_code=result["location"]["county"].get("fips_code") if result["location"]["county"] else None,
-                days_on_mls=self.calculate_days_on_mls(result),
-                agents=prop_details.get("agents"),
-                brokers=prop_details.get("brokers"),
-                nearby_schools=prop_details.get("schools"),
-                assessed_value=prop_details.get("assessed_value"),
-                estimated_value=prop_details.get("estimated_value"),
-            )
-            return realty_property
+        properties_list = response_json["data"][search_key]["results"]
+        total_properties = response_json["data"][search_key]["total"]
+        offset = variables.get("offset", 0)
+
+        #: limit the number of properties to be processed
+        #: example, if your offset is 200, and your limit is 250, return 50
+        properties_list: list[dict] = properties_list[: self.limit - offset]
+
+        if self.extra_property_data:
+            property_ids = [data["property_id"] for data in properties_list]
+            extra_property_details = self.get_bulk_prop_details(property_ids) or {}
+
+            for result in properties_list:
+                result.update(extra_property_details.get(result["property_id"], {}))

+        if self.return_type != ReturnType.raw:
             with ThreadPoolExecutor(max_workers=self.NUM_PROPERTY_WORKERS) as executor:
-            futures = [
-                executor.submit(process_property, result) for result in response_json["data"][search_key]["results"]
-            ]
+                futures = [executor.submit(self.process_property, result) for result in properties_list]

                 for future in as_completed(futures):
                     result = future.result()
                     if result:
                         properties.append(result)
+        else:
+            properties = properties_list

         return {
-            "total": response_json["data"][search_key]["total"],
+            "total": total_properties,
             "properties": properties,
         }
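A hedged sketch of the new page-trimming and detail-merging step introduced above, using plain dicts in place of the scraper's response objects; the helper name and sample data are hypothetical:

```python
# Sketch only: trim a page of results to the caller's limit, then merge the
# bulk-fetched extra details (schools, tax history, ...) keyed by property_id.
def trim_and_merge(results: list[dict], extra: dict[str, dict], limit: int, offset: int) -> list[dict]:
    # e.g. offset=200 and limit=250 -> keep at most the first 50 results of this page
    page = results[: limit - offset]
    for result in page:
        result.update(extra.get(result["property_id"], {}))
    return page


page = trim_and_merge(
    [{"property_id": "1"}, {"property_id": "2"}],
    {"1": {"tax": 1200}},
    limit=250,
    offset=200,
)
print(page)  # [{'property_id': '1', 'tax': 1200}, {'property_id': '2'}]
```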
@@ -599,17 +420,7 @@ class RealtorScraper(Scraper):
         if location_type == "address":
             if not self.radius:  #: single address search, non comps
                 property_id = location_info["mpr_id"]
-                search_variables |= {"property_id": property_id}
-
-                gql_results = self.general_search(search_variables, search_type=search_type)
-                if gql_results["total"] == 0:
-                    listing_id = self.get_latest_listing_id(property_id)
-                    if listing_id is None:
-                        return self.handle_address(property_id)
-                    else:
-                        return self.handle_listing(listing_id)
-                else:
-                    return gql_results["properties"]
+                return self.handle_home(property_id)

             else:  #: general search, comps (radius)
                 if not location_info.get("centroid"):
@@ -628,10 +439,7 @@ class RealtorScraper(Scraper):
         else:  #: general search, location
             search_variables |= {
-                "city": location_info.get("city"),
-                "county": location_info.get("county"),
-                "state_code": location_info.get("state_code"),
-                "postal_code": location_info.get("postal_code"),
+                "location": self.location,
             }

         if self.foreclosure:
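For contrast, a small sketch of what the search variables look like before and after this change; the sample location is made up:

```python
# Old behaviour: the parsed location pieces were sent as separate variables.
location_info = {"city": "Surprise", "county": None, "state_code": "AZ", "postal_code": None}
old_variables = {
    "city": location_info.get("city"),
    "county": location_info.get("county"),
    "state_code": location_info.get("state_code"),
    "postal_code": location_info.get("postal_code"),
}

# New behaviour: the raw location string typed by the caller is forwarded as-is.
new_variables = {"location": "Surprise, AZ"}

print(old_variables)
print(new_variables)
```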
@@ -641,14 +449,18 @@ class RealtorScraper(Scraper):
         total = result["total"]
         homes = result["properties"]

-        with ThreadPoolExecutor(max_workers=10) as executor:
+        with ThreadPoolExecutor() as executor:
             futures = [
                 executor.submit(
                     self.general_search,
                     variables=search_variables | {"offset": i},
                     search_type=search_type,
                 )
-                for i in range(200, min(total, 10000), 200)
+                for i in range(
+                    self.DEFAULT_PAGE_SIZE,
+                    min(total, self.limit),
+                    self.DEFAULT_PAGE_SIZE,
+                )
             ]

             for future in as_completed(futures):
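The offset schedule produced by the new `range()` call, sketched as a standalone helper. `DEFAULT_PAGE_SIZE = 200` is assumed from the `limit: 200` used in the queries, and the first page (offset 0) is fetched before this loop:

```python
DEFAULT_PAGE_SIZE = 200  # assumed to match the "limit: 200" used in the GraphQL queries


def follow_up_offsets(total: int, limit: int) -> list[int]:
    # offsets for the pages fetched concurrently after the initial page
    return list(range(DEFAULT_PAGE_SIZE, min(total, limit), DEFAULT_PAGE_SIZE))


print(follow_up_offsets(total=950, limit=10_000))  # [200, 400, 600, 800]
print(follow_up_offsets(total=950, limit=500))     # [200, 400]
```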
@@ -656,55 +468,8 @@ class RealtorScraper(Scraper):

         return homes

-    def get_prop_details(self, property_id: str) -> dict:
-        if not self.extra_property_data:
-            return {}
-
-        #: TODO: migrate "advertisers" and "estimates" to general query
-        query = """query GetHome($property_id: ID!) {
-            home(property_id: $property_id) {
-                __typename
-                advertisers {
-                    __typename
-                    type
-                    name
-                    email
-                    phones { number type ext primary }
-                }
-                consumer_advertisers {
-                    name
-                    phone
-                    href
-                    type
-                }
-                nearbySchools: nearby_schools(radius: 5.0, limit_per_level: 3) {
-                    __typename schools { district { __typename id name } }
-                }
-                taxHistory: tax_history { __typename tax year assessment { __typename building land total } }
-                estimates {
-                    __typename
-                    currentValues: current_values {
-                        __typename
-                        source { __typename type name }
-                        estimate
-                        estimateHigh: estimate_high
-                        estimateLow: estimate_low
-                        date
-                        isBestHomeValue: isbest_homevalue
-                    }
-                }
-            }
-        }"""
-
-        variables = {"property_id": property_id}
-        response = self.session.post(self.PROPERTY_GQL, json={"query": query, "variables": variables})
-        data = response.json()
-
-        def get_key(keys: list):
+    @staticmethod
+    def get_key(data: dict, keys: list):
         try:
             value = data
             for key in keys:
@@ -714,34 +479,79 @@ class RealtorScraper(Scraper):
         except (KeyError, TypeError, IndexError):
             return {}

-        agents = get_key(["data", "home", "advertisers"])
-        advertisers = get_key(["data", "home", "consumer_advertisers"])
-
-        schools = get_key(["data", "home", "nearbySchools", "schools"])
-        assessed_value = get_key(["data", "home", "taxHistory", 0, "assessment", "total"])
-        estimated_value = get_key(["data", "home", "estimates", "currentValues", 0, "estimate"])
-
-        agents = [Agent(
-            name=ad["name"],
-            email=ad["email"],
-            phones=ad["phones"]
-        ) for ad in agents]
-
-        brokers = [Broker(
-            name=ad["name"],
-            phone=ad["phone"],
-            website=ad["href"]
-        ) for ad in advertisers if ad.get("type") != "Agent"]
-
-        schools = [school["district"]["name"] for school in schools if school['district'].get('name')]
+    def process_extra_property_details(self, result: dict) -> dict:
+        schools = self.get_key(result, ["nearbySchools", "schools"])
+        assessed_value = self.get_key(result, ["taxHistory", 0, "assessment", "total"])
+        tax_history = self.get_key(result, ["taxHistory"])
+
+        schools = [school["district"]["name"] for school in schools if school["district"].get("name")]
+
+        # Process tax history
+        latest_tax = None
+        processed_tax_history = None
+        if tax_history and isinstance(tax_history, list):
+            tax_history = sorted(tax_history, key=lambda x: x.get("year", 0), reverse=True)
+
+            if tax_history and "tax" in tax_history[0]:
+                latest_tax = tax_history[0]["tax"]
+
+            processed_tax_history = []
+            for entry in tax_history:
+                if "year" in entry and "tax" in entry:
+                    processed_entry = {
+                        "year": entry["year"],
+                        "tax": entry["tax"],
+                    }
+                    if "assessment" in entry and isinstance(entry["assessment"], dict):
+                        processed_entry["assessment"] = {
+                            "building": entry["assessment"].get("building"),
+                            "land": entry["assessment"].get("land"),
+                            "total": entry["assessment"].get("total"),
+                        }
+                    processed_tax_history.append(processed_entry)

         return {
-            "agents": agents if agents else None,
-            "brokers": brokers if brokers else None,
             "schools": schools if schools else None,
             "assessed_value": assessed_value if assessed_value else None,
-            "estimated_value": estimated_value if estimated_value else None,
+            "tax": latest_tax,
+            "tax_history": processed_tax_history,
         }

+    @retry(
+        retry=retry_if_exception_type(JSONDecodeError),
+        wait=wait_exponential(min=4, max=10),
+        stop=stop_after_attempt(3),
+    )
+    def get_bulk_prop_details(self, property_ids: list[str]) -> dict:
+        """
+        Fetch extra property details for multiple properties in a single GraphQL query.
+        Returns a map of property_id to its details.
+        """
+        if not self.extra_property_data or not property_ids:
+            return {}
+
+        property_ids = list(set(property_ids))
+
+        # Construct the bulk query
+        fragments = "\n".join(
+            f'home_{property_id}: home(property_id: {property_id}) {{ ...HomeData }}'
+            for property_id in property_ids
+        )
+        query = f"""{HOME_FRAGMENT}
+
+        query GetHomes {{
+            {fragments}
+        }}"""
+
+        response = self.session.post(self.SEARCH_GQL_URL, json={"query": query})
+        data = response.json()
+
+        if "data" not in data:
+            return {}
+
+        properties = data["data"]
+        return {data.replace('home_', ''): properties[data] for data in properties if properties[data]}
+
     @staticmethod
     def _parse_neighborhoods(result: dict) -> Optional[str]:
         neighborhoods_list = []
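To see what the aliased bulk query assembled by `get_bulk_prop_details` looks like, a shortened sketch; the fragment body here is truncated, the real `HOME_FRAGMENT` is defined in the queries module added later in this diff:

```python
# Sketch: one request fetches details for many homes by aliasing each
# home(property_id: ...) selection with a unique name.
HOME_FRAGMENT = """
fragment HomeData on Home {
    property_id
}"""  # truncated stand-in for the real fragment


def build_bulk_query(property_ids: list[str]) -> str:
    fragments = "\n".join(
        f"home_{pid}: home(property_id: {pid}) {{ ...HomeData }}" for pid in sorted(set(property_ids))
    )
    return f"{HOME_FRAGMENT}\n\nquery GetHomes {{\n{fragments}\n}}"


print(build_bulk_query(["1234", "5678"]))
```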
@@ -772,12 +582,14 @@ class RealtorScraper(Scraper):
         return Address(
             full_line=address.get("line"),
             street=" ".join(
-                part for part in [
+                part
+                for part in [
                     address.get("street_number"),
                     address.get("street_direction"),
                     address.get("street_name"),
                     address.get("street_suffix"),
-                ] if part is not None
+                ]
+                if part is not None
             ).strip(),
             unit=address["unit"],
             city=address["city"],
@@ -786,7 +598,10 @@ class RealtorScraper(Scraper):
         )

     @staticmethod
-    def _parse_description(result: dict) -> Description:
+    def _parse_description(result: dict) -> Description | None:
+        if not result:
+            return None
+
         description_data = result.get("description", {})

         if description_data is None or not isinstance(description_data, dict):
@@ -797,22 +612,25 @@ class RealtorScraper(Scraper):
             style = style.upper()

         primary_photo = ""
-        if result and "primary_photo" in result:
-            primary_photo_info = result["primary_photo"]
-            if primary_photo_info and "href" in primary_photo_info:
-                primary_photo_href = primary_photo_info["href"]
+        if (primary_photo_info := result.get("primary_photo")) and (
+            primary_photo_href := primary_photo_info.get("href")
+        ):
             primary_photo = primary_photo_href.replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")

         return Description(
             primary_photo=primary_photo,
-            alt_photos=RealtorScraper.process_alt_photos(result.get("photos")),
-            style=PropertyType(style) if style else None,
+            alt_photos=RealtorScraper.process_alt_photos(result.get("photos", [])),
+            style=(PropertyType.__getitem__(style) if style and style in PropertyType.__members__ else None),
             beds=description_data.get("beds"),
             baths_full=description_data.get("baths_full"),
             baths_half=description_data.get("baths_half"),
             sqft=description_data.get("sqft"),
             lot_sqft=description_data.get("lot_sqft"),
-            sold_price=description_data.get("sold_price") if result.get('last_sold_date') or result["list_price"] != description_data.get("sold_price") else None,  #: has a sold date or list and sold price are different
+            sold_price=(
+                result.get("last_sold_price") or description_data.get("sold_price")
+                if result.get("last_sold_date") or result["list_price"] != description_data.get("sold_price")
+                else None
+            ),  #: has a sold date or list and sold price are different
             year_built=description_data.get("year_built"),
             garage=description_data.get("garage"),
             stories=description_data.get("stories"),
@@ -839,14 +657,12 @@ class RealtorScraper(Scraper):
         return days

     @staticmethod
-    def process_alt_photos(photos_info):
-        try:
-            alt_photos = []
-            if photos_info:
-                for photo_info in photos_info:
-                    href = photo_info.get("href", "")
-                    alt_photo_href = href.replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")
-                    alt_photos.append(alt_photo_href)
-            return alt_photos
-        except Exception:
-            pass
+    def process_alt_photos(photos_info: list[dict]) -> list[str] | None:
+        if not photos_info:
+            return None
+
+        return [
+            photo_info["href"].replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")
+            for photo_info in photos_info
+            if photo_info.get("href")
+        ]
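Both the primary-photo handling and `process_alt_photos` rely on the same URL rewrite; a tiny sketch (the example URL is made up, the replacement suffix is taken verbatim from the diff):

```python
def upscale_photo(href: str) -> str:
    # swap Realtor's small "s.jpg" rendition for a larger webp one
    return href.replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")


print(upscale_photo("https://photos.example/abc123s.jpg"))
# -> https://photos.example/abc123od-w480_h360_x2.webp?w=1080&q=75
```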
@@ -0,0 +1,242 @@
+_SEARCH_HOMES_DATA_BASE = """{
+    pending_date
+    listing_id
+    property_id
+    href
+    list_date
+    status
+    last_sold_price
+    last_sold_date
+    list_price
+    list_price_max
+    list_price_min
+    price_per_sqft
+    tags
+    details {
+        category
+        text
+        parent_category
+    }
+    pet_policy {
+        cats
+        dogs
+        dogs_small
+        dogs_large
+        __typename
+    }
+    units {
+        availability {
+            date
+            __typename
+        }
+        description {
+            baths_consolidated
+            baths
+            beds
+            sqft
+            __typename
+        }
+        list_price
+        __typename
+    }
+    flags {
+        is_contingent
+        is_pending
+        is_new_construction
+    }
+    description {
+        type
+        sqft
+        beds
+        baths_full
+        baths_half
+        lot_sqft
+        year_built
+        garage
+        type
+        name
+        stories
+        text
+    }
+    source {
+        id
+        listing_id
+    }
+    hoa {
+        fee
+    }
+    location {
+        address {
+            street_direction
+            street_number
+            street_name
+            street_suffix
+            line
+            unit
+            city
+            state_code
+            postal_code
+            coordinate {
+                lon
+                lat
+            }
+        }
+        county {
+            name
+            fips_code
+        }
+        neighborhoods {
+            name
+        }
+    }
+    tax_record {
+        public_record_id
+    }
+    primary_photo(https: true) {
+        href
+    }
+    photos(https: true) {
+        href
+        tags {
+            label
+        }
+    }
+    advertisers {
+        email
+        broker {
+            name
+            fulfillment_id
+        }
+        type
+        name
+        fulfillment_id
+        builder {
+            name
+            fulfillment_id
+        }
+        phones {
+            ext
+            primary
+            type
+            number
+        }
+        office {
+            name
+            email
+            fulfillment_id
+            href
+            phones {
+                number
+                type
+                primary
+                ext
+            }
+            mls_set
+        }
+        corporation {
+            specialties
+            name
+            bio
+            href
+            fulfillment_id
+        }
+        mls_set
+        nrds_id
+        rental_corporation {
+            fulfillment_id
+        }
+        rental_management {
+            name
+            href
+            fulfillment_id
+        }
+    }
+"""
+
+
+HOME_FRAGMENT = """
+fragment HomeData on Home {
+    property_id
+    nearbySchools: nearby_schools(radius: 5.0, limit_per_level: 3) {
+        __typename schools { district { __typename id name } }
+    }
+    taxHistory: tax_history { __typename tax year assessment { __typename building land total } }
+    monthly_fees {
+        description
+        display_amount
+    }
+    one_time_fees {
+        description
+        display_amount
+    }
+    parking {
+        unassigned_space_rent
+        assigned_spaces_available
+        description
+        assigned_space_rent
+    }
+    terms {
+        text
+        category
+    }
+}
+"""
+
+HOMES_DATA = """%s
+    nearbySchools: nearby_schools(radius: 5.0, limit_per_level: 3) {
+        __typename schools { district { __typename id name } }
+    }
+    monthly_fees {
+        description
+        display_amount
+    }
+    one_time_fees {
+        description
+        display_amount
+    }
+    parking {
+        unassigned_space_rent
+        assigned_spaces_available
+        description
+        assigned_space_rent
+    }
+    terms {
+        text
+        category
+    }
+    taxHistory: tax_history { __typename tax year assessment { __typename building land total } }
+    estimates {
+        __typename
+        currentValues: current_values {
+            __typename
+            source { __typename type name }
+            estimate
+            estimateHigh: estimate_high
+            estimateLow: estimate_low
+            date
+            isBestHomeValue: isbest_homevalue
+        }
+    }
+}""" % _SEARCH_HOMES_DATA_BASE
+
+SEARCH_HOMES_DATA = """%s
+    current_estimates {
+        __typename
+        source {
+            __typename
+            type
+            name
+        }
+        estimate
+        estimateHigh: estimate_high
+        estimateLow: estimate_low
+        date
+        isBestHomeValue: isbest_homevalue
+    }
+}""" % _SEARCH_HOMES_DATA_BASE
+
+GENERAL_RESULTS_QUERY = """{
+    count
+    total
+    results %s
+}""" % SEARCH_HOMES_DATA
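A rough sketch of how these constants nest, with heavily truncated stand-ins for the blocks above: `SEARCH_HOMES_DATA` extends `_SEARCH_HOMES_DATA_BASE` with `current_estimates` and closes the braces, and `GENERAL_RESULTS_QUERY` wraps it as the `results` selection:

```python
# Truncated stand-ins; the real blocks carry the full field lists shown above.
_SEARCH_HOMES_DATA_BASE = """{
    property_id"""

SEARCH_HOMES_DATA = """%s
    current_estimates { estimate }
}""" % _SEARCH_HOMES_DATA_BASE

GENERAL_RESULTS_QUERY = """{
    count
    total
    results %s
}""" % SEARCH_HOMES_DATA

print(GENERAL_RESULTS_QUERY)
```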
@@ -1,10 +1,13 @@
+from __future__ import annotations
 import pandas as pd
 from datetime import datetime
-from .core.scrapers.models import Property, ListingType, Agent
+from .core.scrapers.models import Property, ListingType, Advertisers
 from .exceptions import InvalidListingType, InvalidDate

 ordered_properties = [
     "property_url",
+    "property_id",
+    "listing_id",
     "mls",
     "mls_id",
     "status",
@@ -23,11 +26,16 @@ ordered_properties = [
     "year_built",
     "days_on_mls",
     "list_price",
+    "list_price_min",
+    "list_price_max",
     "list_date",
     "sold_price",
     "last_sold_date",
     "assessed_value",
     "estimated_value",
+    "tax",
+    "tax_history",
+    "new_construction",
     "lot_sqft",
     "price_per_sqft",
     "latitude",
@@ -38,12 +46,21 @@ ordered_properties = [
     "stories",
     "hoa_fee",
     "parking_garage",
-    "agent",
+    "agent_id",
+    "agent_name",
     "agent_email",
     "agent_phones",
-    "broker",
-    "broker_phone",
-    "broker_website",
+    "agent_mls_set",
+    "agent_nrds_id",
+    "broker_id",
+    "broker_name",
+    "builder_id",
+    "builder_name",
+    "office_id",
+    "office_mls_set",
+    "office_name",
+    "office_email",
+    "office_phones",
     "nearby_schools",
     "primary_photo",
     "alt_photos",
@@ -63,28 +80,48 @@ def process_result(result: Property) -> pd.DataFrame:
     prop_data["state"] = address_data.state
     prop_data["zip_code"] = address_data.zip

-    if "agents" in prop_data:
-        agents: list[Agent] | None = prop_data["agents"]
-        if agents:
-            prop_data["agent"] = agents[0].name
-            prop_data["agent_email"] = agents[0].email
-            prop_data["agent_phones"] = agents[0].phones
-
-    if "brokers" in prop_data:
-        brokers = prop_data["brokers"]
-        if brokers:
-            prop_data["broker"] = brokers[0].name
-            prop_data["broker_phone"] = brokers[0].phone
-            prop_data["broker_website"] = brokers[0].website
+    if "advertisers" in prop_data and prop_data.get("advertisers"):
+        advertiser_data: Advertisers | None = prop_data["advertisers"]
+        if advertiser_data.agent:
+            agent_data = advertiser_data.agent
+            prop_data["agent_id"] = agent_data.uuid
+            prop_data["agent_name"] = agent_data.name
+            prop_data["agent_email"] = agent_data.email
+            prop_data["agent_phones"] = agent_data.phones
+            prop_data["agent_mls_set"] = agent_data.mls_set
+            prop_data["agent_nrds_id"] = agent_data.nrds_id
+
+        if advertiser_data.broker:
+            broker_data = advertiser_data.broker
+            prop_data["broker_id"] = broker_data.uuid
+            prop_data["broker_name"] = broker_data.name
+
+        if advertiser_data.builder:
+            builder_data = advertiser_data.builder
+            prop_data["builder_id"] = builder_data.uuid
+            prop_data["builder_name"] = builder_data.name
+
+        if advertiser_data.office:
+            office_data = advertiser_data.office
+            prop_data["office_id"] = office_data.uuid
+            prop_data["office_name"] = office_data.name
+            prop_data["office_email"] = office_data.email
+            prop_data["office_phones"] = office_data.phones
+            prop_data["office_mls_set"] = office_data.mls_set

     prop_data["price_per_sqft"] = prop_data["prc_sqft"]
     prop_data["nearby_schools"] = filter(None, prop_data["nearby_schools"]) if prop_data["nearby_schools"] else None
     prop_data["nearby_schools"] = ", ".join(set(prop_data["nearby_schools"])) if prop_data["nearby_schools"] else None

     description = result.description
+    if description:
         prop_data["primary_photo"] = description.primary_photo
-        prop_data["alt_photos"] = ", ".join(description.alt_photos)
-        prop_data["style"] = description.style if type(description.style) == str else description.style.value
+        prop_data["alt_photos"] = ", ".join(description.alt_photos) if description.alt_photos else None
+        prop_data["style"] = (
+            description.style
+            if isinstance(description.style, str)
+            else description.style.value if description.style else None
+        )
         prop_data["beds"] = description.beds
         prop_data["full_baths"] = description.baths_full
         prop_data["half_baths"] = description.baths_half
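A hedged sketch of the flattening performed above, with simple dataclasses standing in for the real `Advertisers`/`Agent` models (only two of the flattened columns are shown):

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Agent:  # stand-in for the real model
    uuid: str | None = None
    name: str | None = None


@dataclass
class Advertisers:  # stand-in for the real model
    agent: Agent | None = None


def flatten_agent(advertisers: Advertisers | None) -> dict:
    row: dict = {}
    if advertisers and advertisers.agent:
        row["agent_id"] = advertisers.agent.uuid
        row["agent_name"] = advertisers.agent.name
    return row


print(flatten_agent(Advertisers(agent=Agent(uuid="a1", name="Jane Doe"))))
# {'agent_id': 'a1', 'agent_name': 'Jane Doe'}
```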
@@ -108,7 +145,7 @@ def validate_input(listing_type: str) -> None:


 def validate_dates(date_from: str | None, date_to: str | None) -> None:
-    if (date_from is not None and date_to is None) or (date_from is None and date_to is not None):
+    if isinstance(date_from, str) != isinstance(date_to, str):
         raise InvalidDate("Both date_from and date_to must be provided.")

     if date_from and date_to:
@@ -120,3 +157,10 @@ def validate_dates(date_from: str | None, date_to: str | None) -> None:
             raise InvalidDate("date_to must be after date_from.")
         except ValueError:
             raise InvalidDate(f"Invalid date format or range")
+
+
+def validate_limit(limit: int) -> None:
+    #: 1 -> 10000 limit
+
+    if limit is not None and (limit < 1 or limit > 10000):
+        raise ValueError("Property limit must be between 1 and 10,000.")
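Quick usage sketch of the new limit validator (behaviour copied from the function above; how it is wired into `scrape_property` is not shown here):

```python
def validate_limit(limit: int) -> None:
    #: 1 -> 10000 limit
    if limit is not None and (limit < 1 or limit > 10000):
        raise ValueError("Property limit must be between 1 and 10,000.")


validate_limit(500)          # ok
try:
    validate_limit(20_000)   # too large
except ValueError as err:
    print(err)               # Property limit must be between 1 and 10,000.
```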
@@ -1,4 +1,15 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]

 [[package]]
 name = "certifi"
@ -391,6 +402,116 @@ nodeenv = ">=0.11.1"
|
||||||
pyyaml = ">=5.1"
|
pyyaml = ">=5.1"
|
||||||
virtualenv = ">=20.10.0"
|
virtualenv = ">=20.10.0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic"
|
||||||
|
version = "2.7.4"
|
||||||
|
description = "Data validation using Python type hints"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
|
||||||
|
{file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
annotated-types = ">=0.4.0"
|
||||||
|
pydantic-core = "2.18.4"
|
||||||
|
typing-extensions = ">=4.6.1"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
email = ["email-validator (>=2.0.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic-core"
|
||||||
|
version = "2.18.4"
|
||||||
|
description = "Core functionality for Pydantic validation and serialization"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"},
|
||||||
|
{file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"},
|
||||||
|
{file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"},
|
||||||
|
{file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pytest"
|
name = "pytest"
|
||||||
version = "7.4.2"
|
version = "7.4.2"
|
||||||
|
@@ -546,6 +667,21 @@ files = [
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]

+[[package]]
+name = "tenacity"
+version = "9.0.0"
+description = "Retry code until it succeeds"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+    {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
+]
+
+[package.extras]
+doc = ["reno", "sphinx"]
+test = ["pytest", "tornado (>=4.5)", "typeguard"]
+
 [[package]]
 name = "tomli"
 version = "2.0.1"
@@ -557,6 +693,17 @@ files = [
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]

+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
 [[package]]
 name = "tzdata"
 version = "2023.3"
@@ -607,5 +754,5 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess

 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.10,<3.13"
+python-versions = ">=3.9,<3.13"
-content-hash = "371781da268d5f61d6e798c023777f337b620e9b07a48c316825d7b998b63f02"
+content-hash = "cefc11b1bf5ad99d628f6d08f6f03003522cc1b6e48b519230d99d716a5c165c"
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "homeharvest"
-version = "0.3.23"
+version = "0.4.7"
 description = "Real estate scraping library"
 authors = ["Zachary Hampton <zachary@bunsly.com>", "Cullen Watson <cullen@bunsly.com>"]
 homepage = "https://github.com/Bunsly/HomeHarvest"
@@ -10,9 +10,11 @@ readme = "README.md"
 homeharvest = "homeharvest.cli:main"

 [tool.poetry.dependencies]
-python = ">=3.10,<3.13"
+python = ">=3.9,<3.13"
 requests = "^2.31.0"
 pandas = "^2.1.1"
+pydantic = "^2.7.4"
+tenacity = "^9.0.0"


 [tool.poetry.group.dev.dependencies]
@@ -1,10 +1,11 @@
-from homeharvest import scrape_property
+from homeharvest import scrape_property, Property
+import pandas as pd


 def test_realtor_pending_or_contingent():
     pending_or_contingent_result = scrape_property(location="Surprise, AZ", listing_type="pending")

-    regular_result = scrape_property(location="Surprise, AZ", listing_type="for_sale")
+    regular_result = scrape_property(location="Surprise, AZ", listing_type="for_sale", exclude_pending=True)

     assert all([result is not None for result in [pending_or_contingent_result, regular_result]])
     assert len(pending_or_contingent_result) != len(regular_result)
@@ -105,8 +106,12 @@ def test_realtor():
             location="2530 Al Lipscomb Way",
             listing_type="for_sale",
         ),
-        scrape_property(location="Phoenix, AZ", listing_type="for_rent"),  #: does not support "city, state, USA" format
-        scrape_property(location="Dallas, TX", listing_type="sold"),  #: does not support "city, state, USA" format
+        scrape_property(
+            location="Phoenix, AZ", listing_type="for_rent", limit=1000
+        ),  #: does not support "city, state, USA" format
+        scrape_property(
+            location="Dallas, TX", listing_type="sold", limit=1000
+        ),  #: does not support "city, state, USA" format
         scrape_property(location="85281"),
     ]

@@ -114,10 +119,13 @@ def test_realtor():


 def test_realtor_city():
-    results = scrape_property(
-        location="Atlanta, GA",
-        listing_type="for_sale",
-    )
+    results = scrape_property(location="Atlanta, GA", listing_type="for_sale", limit=1000)
+
+    assert results is not None and len(results) > 0
+
+
+def test_realtor_land():
+    results = scrape_property(location="Atlanta, GA", listing_type="for_sale", property_type=["land"], limit=1000)

     assert results is not None and len(results) > 0

@@ -127,6 +135,7 @@ def test_realtor_bad_address():
         location="abceefg ju098ot498hh9",
         listing_type="for_sale",
     )

     if len(bad_results) == 0:
         assert True

@@ -140,22 +149,27 @@ def test_realtor_foreclosed():


 def test_realtor_agent():
-    scraped = scrape_property(location="Detroit, MI", listing_type="for_sale")
-    assert scraped["agent"].nunique() > 1
+    scraped = scrape_property(location="Detroit, MI", listing_type="for_sale", limit=1000, extra_property_data=False)
+    assert scraped["agent_name"].nunique() > 1


 def test_realtor_without_extra_details():
     results = [
         scrape_property(
-            location="15509 N 172nd Dr, Surprise, AZ 85388",
+            location="00741",
+            listing_type="sold",
+            limit=10,
             extra_property_data=False,
         ),
         scrape_property(
-            location="15509 N 172nd Dr, Surprise, AZ 85388",
+            location="00741",
+            listing_type="sold",
+            limit=10,
+            extra_property_data=True,
         ),
     ]

-    assert results[0] != results[1]
+    assert not results[0].equals(results[1])


 def test_pr_zip_code():
@@ -165,3 +179,124 @@ def test_pr_zip_code():
     )

     assert results is not None and len(results) > 0
+
+
+def test_exclude_pending():
+    results = scrape_property(
+        location="33567",
+        listing_type="pending",
+        exclude_pending=True,
+    )
+
+    assert results is not None and len(results) > 0
+
+
+def test_style_value_error():
+    results = scrape_property(
+        location="Alaska, AK",
+        listing_type="sold",
+        extra_property_data=False,
+        limit=1000,
+    )
+
+    assert results is not None and len(results) > 0
+
+
+def test_primary_image_error():
+    results = scrape_property(
+        location="Spokane, PA",
+        listing_type="for_rent",  # or (for_sale, for_rent, pending)
+        past_days=360,
+        radius=3,
+        extra_property_data=False,
+    )
+
+    assert results is not None and len(results) > 0
+
+
+def test_limit():
+    over_limit = 876
+    extra_params = {"limit": over_limit}
+
+    over_results = scrape_property(
+        location="Waddell, AZ",
+        listing_type="for_sale",
+        **extra_params,
+    )
+
+    assert over_results is not None and len(over_results) <= over_limit
+
+    under_limit = 1
+    under_results = scrape_property(
+        location="Waddell, AZ",
+        listing_type="for_sale",
+        limit=under_limit,
+    )
+
+    assert under_results is not None and len(under_results) == under_limit
+
+
+def test_apartment_list_price():
+    results = scrape_property(
+        location="Spokane, WA",
+        listing_type="for_rent",  # or (for_sale, for_rent, pending)
+        extra_property_data=False,
+    )
+
+    assert results is not None
+
+    results = results[results["style"] == "APARTMENT"]
+
+    #: get percentage of results with atleast 1 of any column not none, list_price, list_price_min, list_price_max
+    assert (
+        len(results[results[["list_price", "list_price_min", "list_price_max"]].notnull().any(axis=1)]) / len(results)
+        > 0.5
+    )
+
+
+def test_builder_exists():
+    listing = scrape_property(
+        location="18149 W Poston Dr, Surprise, AZ 85387",
+        extra_property_data=False,
+    )
+
+    assert listing is not None
+    assert listing["builder_name"].nunique() > 0
+
+
+def test_phone_number_matching():
+    searches = [
+        scrape_property(
+            location="Phoenix, AZ",
+            listing_type="for_sale",
+            limit=100,
+        ),
+        scrape_property(
+            location="Phoenix, AZ",
+            listing_type="for_sale",
+            limit=100,
+        ),
+    ]
+
+    assert all([search is not None for search in searches])
+
+    #: random row
+    row = searches[0][searches[0]["agent_phones"].notnull()].sample()
+
+    #: find matching row
+    matching_row = searches[1].loc[searches[1]["property_url"] == row["property_url"].values[0]]
+
+    #: assert phone numbers are the same
+    assert row["agent_phones"].values[0] == matching_row["agent_phones"].values[0]
+
+
+def test_return_type():
+    results = {
+        "pandas": scrape_property(location="Surprise, AZ", listing_type="for_rent", limit=100),
+        "pydantic": scrape_property(location="Surprise, AZ", listing_type="for_rent", limit=100, return_type="pydantic"),
+        "raw": scrape_property(location="Surprise, AZ", listing_type="for_rent", limit=100, return_type="raw"),
+    }
+
+    assert isinstance(results["pandas"], pd.DataFrame)
+    assert isinstance(results["pydantic"][0], Property)
+    assert isinstance(results["raw"][0], dict)