chore: version

pull/43/head
Cullen 2023-11-24 13:41:46 -06:00
parent a74c1a9950
commit 6dd0b058d3
5 changed files with 75 additions and 1 deletion


@@ -34,6 +34,8 @@ class Address:
@dataclass
class Description:
    primary_photo: str | None = None
    alt_photos: list[str] | None = None
    style: str | None = None
    beds: int | None = None
    baths_full: int | None = None


@@ -84,6 +84,12 @@ class RealtorScraper(Scraper):
            garage
            permalink
        }
        primary_photo {
            href
        }
        photos {
            href
        }
    }
}"""
@@ -152,6 +158,8 @@ class RealtorScraper(Scraper):
                else None,
                address=self._parse_address(property_info, search_type="handle_listing"),
                description=Description(
                    primary_photo=property_info["primary_photo"].get("href", "").replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75"),
                    alt_photos=self.process_alt_photos(property_info.get("photos", [])),
                    style=property_info["basic"].get("type", "").upper(),
                    beds=property_info["basic"].get("beds"),
                    baths_full=property_info["basic"].get("baths_full"),
@@ -247,6 +255,12 @@ class RealtorScraper(Scraper):
            units
            year_built
        }
        primary_photo {
            href
        }
        photos {
            href
        }
    }
}"""
@@ -334,6 +348,12 @@ class RealtorScraper(Scraper):
                    name
                }
            }
            primary_photo {
                href
            }
            photos {
                href
            }
        }
    }
}"""
@@ -621,6 +641,7 @@ class RealtorScraper(Scraper):
    @staticmethod
    def _parse_description(result: dict) -> Description:
        description_data = result.get("description", {})
        if description_data is None or not isinstance(description_data, dict):
@@ -630,7 +651,16 @@ class RealtorScraper(Scraper):
        if style is not None:
            style = style.upper()

        primary_photo = ""
        if result and "primary_photo" in result:
            primary_photo_info = result["primary_photo"]
            if primary_photo_info and "href" in primary_photo_info:
                primary_photo_href = primary_photo_info["href"]
                primary_photo = primary_photo_href.replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")

        return Description(
            primary_photo=primary_photo,
            alt_photos=RealtorScraper.process_alt_photos(result.get("photos")),
            style=style,
            beds=description_data.get("beds"),
            baths_full=description_data.get("baths_full"),
@@ -643,6 +673,7 @@ class RealtorScraper(Scraper):
            stories=description_data.get("stories"),
        )

    @staticmethod
    def calculate_days_on_mls(result: dict) -> Optional[int]:
        list_date_str = result.get("list_date")
@@ -661,3 +692,16 @@ class RealtorScraper(Scraper):
            days = (today - list_date).days
            if days >= 0:
                return days

    @staticmethod
    def process_alt_photos(photos_info):
        try:
            alt_photos = []
            if photos_info:
                for photo_info in photos_info:
                    href = photo_info.get("href", "")
                    alt_photo_href = href.replace("s.jpg", "od-w480_h360_x2.webp?w=1080&q=75")
                    alt_photos.append(alt_photo_href)
            return alt_photos
        except Exception:
            pass
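
Both the primary_photo parsing and process_alt_photos rewrite Realtor.com's thumbnail suffix s.jpg into the higher-resolution od-w480_h360_x2.webp variant. A minimal usage sketch, assuming the scraper class is importable as homeharvest.core.scrapers.realtor.RealtorScraper and using made-up photo hrefs:

from homeharvest.core.scrapers.realtor import RealtorScraper  # assumed import path

# Hypothetical photo payload shaped like the GraphQL responses requested above
photos_info = [
    {"href": "https://ap.rdcpix.com/abc123s.jpg"},
    {"href": "https://ap.rdcpix.com/def456s.jpg"},
]
print(RealtorScraper.process_alt_photos(photos_info))
# ['https://ap.rdcpix.com/abc123od-w480_h360_x2.webp?w=1080&q=75',
#  'https://ap.rdcpix.com/def456od-w480_h360_x2.webp?w=1080&q=75']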


@@ -5,6 +5,8 @@ from .exceptions import InvalidListingType, InvalidDate

ordered_properties = [
    "property_url",
    "primary_photo",
    "alt_photos",
    "mls",
    "mls_id",
    "status",
@@ -49,6 +51,8 @@ def process_result(result: Property) -> pd.DataFrame:
        prop_data["price_per_sqft"] = prop_data["prc_sqft"]

    description = result.description
    prop_data["primary_photo"] = description.primary_photo
    prop_data["alt_photos"] = ", ".join(description.alt_photos)
    prop_data["style"] = description.style
    prop_data["beds"] = description.beds
    prop_data["full_baths"] = description.baths_full


@@ -1,6 +1,6 @@
[tool.poetry]
name = "homeharvest"
-version = "0.3.9"
+version = "0.3.10"
description = "Real estate scraping library supporting Zillow, Realtor.com & Redfin."
authors = ["Zachary Hampton <zachary@zacharysproducts.com>", "Cullen Watson <cullen@cullen.ai>"]
homepage = "https://github.com/Bunsly/HomeHarvest"

test.py

@@ -0,0 +1,24 @@
from homeharvest import scrape_property
from datetime import datetime

# Generate filename based on current timestamp
current_timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"HomeHarvest_{current_timestamp}.csv"

properties = scrape_property(
    location="8134 midway rd dallas tx 75209",
    listing_type="pending",  # or (for_sale, for_rent, pending)
    radius=0.5

    # past_days=30,  # sold in last 30 days - listed in last 30 days if (for_sale, for_rent)
    # date_from="2023-05-01",  # alternative to past_days
    # date_to="2023-05-28",

    # mls_only=True,  # only fetch MLS listings
    # proxy="http://user:pass@host:port"  # use a proxy to change your IP address
)
print(f"Number of properties: {len(properties)}")

# Export to csv
properties.to_csv(filename, index=False)
print(properties.head())
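
Since scrape_property returns a pandas DataFrame, the photo columns added in this commit can be inspected straight from the result above; a short follow-up sketch continuing from test.py (column names taken from ordered_properties):

# New columns introduced by this change
print(properties[["property_url", "primary_photo", "alt_photos"]].head())

# alt_photos is a comma-joined string; count alternate photos per listing
print(properties["alt_photos"].str.split(", ").str.len())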