HomeHarvest/homeharvest/__init__.py

import warnings
import pandas as pd
from .core.scrapers import ScraperInput
from .utils import process_result, ordered_properties, validate_input, validate_dates
from .core.scrapers.realtor import RealtorScraper
from .core.scrapers.models import ListingType


def scrape_property(
    location: str,
    listing_type: str = "for_sale",
    radius: float = None,
    mls_only: bool = False,
    past_days: int = None,
    proxy: str = None,
    date_from: str = None,  #: TODO: Switch to one parameter, Date, with date_from and date_to, pydantic validation
    date_to: str = None,
    foreclosure: bool = None,
    extra_property_data: bool = True,
) -> pd.DataFrame:
    """
    Scrape properties from Realtor.com based on a given location and listing type.

    :param location: Location to search (e.g. "Dallas, TX", "85281", "2530 Al Lipscomb Way")
    :param listing_type: Listing Type (for_sale, for_rent, sold)
    :param radius: Get properties within _ (e.g. 1.0) miles. Only applicable for individual addresses.
    :param mls_only: If set, fetches only listings with MLS IDs.
    :param proxy: Proxy to use for scraping
    :param past_days: Get properties sold or listed (dependent on your listing_type) in the last _ days.
    :param date_from, date_to: Get properties sold or listed (dependent on your listing_type) between these dates. format: 2021-01-28
    :param foreclosure: If set, fetches only foreclosure listings.
    :param extra_property_data: Increases requests by O(n). If set, this fetches additional property data (e.g. agent, broker, property evaluations etc.)
    """
    validate_input(listing_type)
    validate_dates(date_from, date_to)

    scraper_input = ScraperInput(
        location=location,
        listing_type=ListingType[listing_type.upper()],
        proxy=proxy,
        radius=radius,
        mls_only=mls_only,
        last_x_days=past_days,
        date_from=date_from,
        date_to=date_to,
        foreclosure=foreclosure,
        extra_property_data=extra_property_data,
    )
    site = RealtorScraper(scraper_input)
    results = site.search()

    properties_dfs = [process_result(result) for result in results]
    if not properties_dfs:
        return pd.DataFrame()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=FutureWarning)
        return pd.concat(properties_dfs, ignore_index=True, axis=0)[ordered_properties].replace({"None": "", None: ""})
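
# Example usage (a minimal sketch; the location, listing type, and 30-day
# window below are illustrative values chosen for the example, not defaults
# of this module):
#
#   from homeharvest import scrape_property
#
#   df = scrape_property(
#       location="Dallas, TX",
#       listing_type="sold",
#       past_days=30,
#       mls_only=True,
#   )
#   print(df.head())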