Mirror of https://github.com/Bunsly/HomeHarvest.git (synced 2026-03-05 03:54:29 -08:00)

Compare commits: 3 commits
| Author | SHA1 | Date |
|---|---|---|
| | c2f01df1ad | |
| | 9b61a89c77 | |
| | 7065f8a0d4 | |
README.md (15 changes)
@@ -94,6 +94,17 @@ properties = scrape_property(
 )
 ```
 
+#### Pagination Control
+```py
+# Sequential mode with early termination (more efficient for narrow filters)
+properties = scrape_property(
+    location="Los Angeles, CA",
+    listing_type="for_sale",
+    updated_in_past_hours=2,  # Narrow time window
+    parallel=False  # Fetch pages sequentially, stop when filters no longer match
+)
+```
+
 ## Output
 ```plaintext
 >>> properties.head()
@@ -234,7 +245,9 @@ Optional
 │
 ├── limit (integer): Limit the number of properties to fetch. Max & default is 10000.
 │
-└── offset (integer): Starting position for pagination within the 10k limit. Use with limit to fetch results in chunks.
+├── offset (integer): Starting position for pagination within the 10k limit. Use with limit to fetch results in chunks.
+│
+└── parallel (True/False): Controls pagination strategy. Default is True (fetch pages in parallel for speed). Set to False for sequential fetching with early termination (useful for rate limiting or narrow time windows).
 ```
 
 ### Property Schema
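To make the `limit`, `offset`, and `parallel` knobs documented above concrete, here is a minimal sketch (not part of the diff) of chunked fetching within the 10k window; the chunk size and location are arbitrary illustrative choices.

```python
import pandas as pd
from homeharvest import scrape_property

# Hypothetical chunked fetch: walk the 10k window in 500-row pages.
chunks = []
for offset in range(0, 10_000, 500):
    chunk = scrape_property(
        location="Los Angeles, CA",
        listing_type="for_sale",
        limit=500,
        offset=offset,
        parallel=True,  # parallel page fetches inside each chunk
    )
    if len(chunk) == 0:
        break  # nothing left in the window
    chunks.append(chunk)

all_properties = pd.concat(chunks, ignore_index=True)
```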
@@ -48,6 +48,8 @@ def scrape_property(
     # New sorting parameters
     sort_by: str = None,
     sort_direction: str = "desc",
+    # Pagination control
+    parallel: bool = True,
 ) -> Union[pd.DataFrame, list[dict], list[Property]]:
     """
     Scrape properties from Realtor.com based on a given location and listing type.
@@ -72,6 +74,8 @@ def scrape_property(
         - date objects: date(2025, 1, 20) (day-level precision)
         - datetime objects: datetime(2025, 1, 20, 14, 30) (hour-level precision)
         The precision is automatically detected based on the input format.
+        Timezone handling: Naive datetimes are treated as local time and automatically converted to UTC.
+        Timezone-aware datetimes are converted to UTC. For best results, use timezone-aware datetimes.
     :param foreclosure: If set, fetches only foreclosure listings.
     :param extra_property_data: Increases requests by O(n). If set, this fetches additional property data (e.g. agent, broker, property evaluations etc.)
     :param exclude_pending: If true, this excludes pending or contingent properties from the results, unless listing type is pending.
@@ -80,7 +84,11 @@ def scrape_property(
 
     New parameters:
     :param past_hours: Get properties in the last _ hours (requires client-side filtering). Accepts int or timedelta.
-    :param updated_since: Filter by last_update_date (when property was last updated). Accepts datetime object or ISO 8601 string (client-side filtering)
+    :param updated_since: Filter by last_update_date (when property was last updated). Accepts datetime object or ISO 8601 string (client-side filtering).
+        Timezone handling: Naive datetimes (like datetime.now()) are treated as local time and automatically converted to UTC.
+        Timezone-aware datetimes are converted to UTC. Examples:
+        - datetime.now() - uses your local timezone
+        - datetime.now(timezone.utc) - uses UTC explicitly
     :param updated_in_past_hours: Filter by properties updated in the last _ hours. Accepts int or timedelta (client-side filtering)
     :param beds_min, beds_max: Filter by number of bedrooms
     :param baths_min, baths_max: Filter by number of bathrooms
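The docstring lines above describe how naive and aware datetimes are normalized. As a quick illustration (not from the diff; the location and window are arbitrary), both calls below should resolve to the same UTC cutoff:

```python
from datetime import datetime, timedelta, timezone
from homeharvest import scrape_property

# Naive datetime: interpreted as local time, converted to UTC internally
naive_cutoff = datetime.now() - timedelta(hours=2)

# Timezone-aware datetime: converted to UTC directly (recommended)
aware_cutoff = datetime.now(timezone.utc) - timedelta(hours=2)

props_a = scrape_property("Phoenix, AZ", updated_since=naive_cutoff)
props_b = scrape_property("Phoenix, AZ", updated_since=aware_cutoff)
```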
@@ -90,6 +98,9 @@ def scrape_property(
     :param year_built_min, year_built_max: Filter by year built
     :param sort_by: Sort results by field (list_date, sold_date, list_price, sqft, beds, baths, last_update_date)
     :param sort_direction: Sort direction (asc, desc)
+    :param parallel: Controls pagination strategy. True (default) = fetch all pages in parallel for maximum speed.
+        False = fetch pages sequentially with early termination checks (useful for rate limiting or narrow time windows).
+        Sequential mode will stop paginating as soon as time-based filters indicate no more matches are possible.
 
     Note: past_days and past_hours also accept timedelta objects for more Pythonic usage.
     """
@@ -129,6 +140,22 @@ def scrape_property(
     converted_updated_since = convert_to_datetime_string(updated_since)
     converted_updated_in_past_hours = extract_timedelta_hours(updated_in_past_hours)
 
+    # Auto-apply optimal sort for time-based filters (unless user specified different sort)
+    if (converted_updated_since or converted_updated_in_past_hours) and not sort_by:
+        sort_by = "last_update_date"
+        if not sort_direction:
+            sort_direction = "desc"  # Most recent first
+
+    # Auto-apply optimal sort for PENDING listings with date filters
+    # PENDING API filtering is broken, so we rely on client-side filtering
+    # Sorting by pending_date ensures efficient pagination with early termination
+    elif (converted_listing_type == ListingType.PENDING and
+          (converted_past_days or converted_past_hours or converted_date_from) and
+          not sort_by):
+        sort_by = "pending_date"
+        if not sort_direction:
+            sort_direction = "desc"  # Most recent first
+
     scraper_input = ScraperInput(
         location=location,
         listing_type=converted_listing_type,
@@ -168,6 +195,8 @@ def scrape_property(
         # New sorting
         sort_by=sort_by,
         sort_direction=sort_direction,
+        # Pagination control
+        parallel=parallel,
     )
 
     site = RealtorScraper(scraper_input)
@@ -55,6 +55,9 @@ class ScraperInput(BaseModel):
     sort_by: str | None = None
     sort_direction: str = "desc"
 
+    # Pagination control
+    parallel: bool = True
+
 
 class Scraper:
     session = None
@@ -141,6 +144,9 @@ class Scraper:
         self.sort_by = scraper_input.sort_by
         self.sort_direction = scraper_input.sort_direction
 
+        # Pagination control
+        self.parallel = scraper_input.parallel
+
     def search(self) -> list[Union[Property | dict]]: ...
 
     @staticmethod
@@ -526,31 +526,49 @@ class RealtorScraper(Scraper):
         total = result["total"]
         homes = result["properties"]
 
-        with ThreadPoolExecutor() as executor:
-            # Store futures with their offsets to maintain proper sort order
-            # Start from offset + page_size and go up to offset + limit
-            futures_with_offsets = [
-                (i, executor.submit(
-                    self.general_search,
-                    variables=search_variables | {"offset": i},
-                    search_type=search_type,
-                ))
-                for i in range(
-                    self.offset + self.DEFAULT_PAGE_SIZE,
-                    min(total, self.offset + self.limit),
-                    self.DEFAULT_PAGE_SIZE,
-                )
-            ]
-
-            # Collect results and sort by offset to preserve API sort order across pages
-            results = []
-            for offset, future in futures_with_offsets:
-                results.append((offset, future.result()["properties"]))
-
-            # Sort by offset and concatenate in correct order
-            results.sort(key=lambda x: x[0])
-            for offset, properties in results:
-                homes.extend(properties)
+        # Fetch remaining pages based on parallel parameter
+        if self.offset + self.DEFAULT_PAGE_SIZE < min(total, self.offset + self.limit):
+            if self.parallel:
+                # Parallel mode: Fetch all remaining pages in parallel
+                with ThreadPoolExecutor() as executor:
+                    futures_with_offsets = [
+                        (i, executor.submit(
+                            self.general_search,
+                            variables=search_variables | {"offset": i},
+                            search_type=search_type,
+                        ))
+                        for i in range(
+                            self.offset + self.DEFAULT_PAGE_SIZE,
+                            min(total, self.offset + self.limit),
+                            self.DEFAULT_PAGE_SIZE,
+                        )
+                    ]
+
+                    # Collect results and sort by offset to preserve API sort order
+                    results = []
+                    for offset, future in futures_with_offsets:
+                        results.append((offset, future.result()["properties"]))
+
+                    results.sort(key=lambda x: x[0])
+                    for offset, properties in results:
+                        homes.extend(properties)
+            else:
+                # Sequential mode: Fetch pages one by one with early termination checks
+                for current_offset in range(
+                    self.offset + self.DEFAULT_PAGE_SIZE,
+                    min(total, self.offset + self.limit),
+                    self.DEFAULT_PAGE_SIZE,
+                ):
+                    # Check if we should continue based on time-based filters
+                    if not self._should_fetch_more_pages(homes):
+                        break
+
+                    result = self.general_search(
+                        variables=search_variables | {"offset": current_offset},
+                        search_type=search_type,
+                    )
+                    page_properties = result["properties"]
+                    homes.extend(page_properties)
 
         # Apply client-side hour-based filtering if needed
         # (API only supports day-level filtering, so we post-filter for hour precision)
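The sequential branch above leans on one invariant: with results sorted descending by the filter date, the last record on a page is the oldest seen so far, so once it falls outside the time window no later page can match. A self-contained sketch of that idea (the names here are illustrative, not the scraper's API):

```python
from datetime import datetime

def fetch_until_stale(fetch_page, cutoff: datetime, page_size: int = 200):
    """Pull pages of date-descending records, stopping once a page ends before the cutoff.
    fetch_page(offset) is assumed to return a list of dicts with an 'updated' datetime."""
    records, offset = [], 0
    while True:
        page = fetch_page(offset)
        if not page:
            break
        records.extend(r for r in page if r["updated"] >= cutoff)
        # The last record is the oldest on this page; if it predates the cutoff,
        # every later page is older still, so stop paginating.
        if page[-1]["updated"] < cutoff:
            break
        offset += page_size
    return records
```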
@@ -747,13 +765,14 @@ class RealtorScraper(Scraper):
         if not homes:
             return homes
 
-        from datetime import datetime, timedelta
+        from datetime import datetime, timedelta, timezone
 
         # Determine date range for last_update_date filtering
         date_range = None
 
         if self.updated_in_past_hours:
-            cutoff_datetime = datetime.now() - timedelta(hours=self.updated_in_past_hours)
+            # Use UTC now, strip timezone to match naive property dates
+            cutoff_datetime = (datetime.now(timezone.utc) - timedelta(hours=self.updated_in_past_hours)).replace(tzinfo=None)
             date_range = {'type': 'since', 'date': cutoff_datetime}
         elif self.updated_since:
             try:
@@ -784,15 +803,19 @@ class RealtorScraper(Scraper):
 
     def _get_date_range(self):
         """Get the date range for filtering based on instance parameters."""
-        from datetime import datetime, timedelta
+        from datetime import datetime, timedelta, timezone
 
         if self.last_x_days:
-            cutoff_date = datetime.now() - timedelta(days=self.last_x_days)
+            # Use UTC now, strip timezone to match naive property dates
+            cutoff_date = (datetime.now(timezone.utc) - timedelta(days=self.last_x_days)).replace(tzinfo=None)
             return {'type': 'since', 'date': cutoff_date}
         elif self.date_from and self.date_to:
             try:
-                from_date = datetime.fromisoformat(self.date_from)
-                to_date = datetime.fromisoformat(self.date_to)
+                # Parse and strip timezone to match naive property dates
+                from_date_str = self.date_from.replace('Z', '+00:00') if self.date_from.endswith('Z') else self.date_from
+                to_date_str = self.date_to.replace('Z', '+00:00') if self.date_to.endswith('Z') else self.date_to
+                from_date = datetime.fromisoformat(from_date_str).replace(tzinfo=None)
+                to_date = datetime.fromisoformat(to_date_str).replace(tzinfo=None)
                 return {'type': 'range', 'from_date': from_date, 'to_date': to_date}
             except ValueError:
                 return None
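For context on why this hunk (and the ones around it) strip `tzinfo` everywhere: Python refuses to order naive against aware datetimes, so normalizing both sides to naive UTC keeps the range checks from raising. A standalone illustration, not repository code:

```python
from datetime import datetime, timezone

naive = datetime(2025, 1, 20, 14, 30)                       # no tzinfo
aware = datetime(2025, 1, 20, 14, 30, tzinfo=timezone.utc)  # UTC-aware

try:
    naive < aware
except TypeError as exc:
    print(exc)  # can't compare offset-naive and offset-aware datetimes

# Stripping tzinfo makes the comparison legal again.
print(naive < aware.replace(tzinfo=None))  # False: the two values are equal after normalization
```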
@@ -844,6 +867,74 @@ class RealtorScraper(Scraper):
             return date_range['from_date'] <= date_obj <= date_range['to_date']
         return False
 
+    def _should_fetch_more_pages(self, first_page):
+        """Determine if we should continue pagination based on first page results.
+
+        This optimization prevents unnecessary API calls when using time-based filters
+        with date sorting. If the last property on page 1 is already outside the time
+        window, all future pages will also be outside (due to sort order).
+
+        Args:
+            first_page: List of properties from the first page
+
+        Returns:
+            bool: True if we should continue pagination, False to stop early
+        """
+        from datetime import datetime, timedelta, timezone
+
+        # Check for last_update_date filters
+        if (self.updated_since or self.updated_in_past_hours) and self.sort_by == "last_update_date":
+            if not first_page:
+                return False
+
+            last_property = first_page[-1]
+            last_date = self._extract_date_from_home(last_property, 'last_update_date')
+
+            if not last_date:
+                return True
+
+            # Build date range for last_update_date filter
+            if self.updated_since:
+                try:
+                    cutoff_datetime = datetime.fromisoformat(self.updated_since.replace('Z', '+00:00') if self.updated_since.endswith('Z') else self.updated_since)
+                    # Strip timezone to match naive datetimes from _parse_date_value
+                    cutoff_datetime = cutoff_datetime.replace(tzinfo=None)
+                    date_range = {'type': 'since', 'date': cutoff_datetime}
+                except ValueError:
+                    return True
+            elif self.updated_in_past_hours:
+                # Use UTC now, strip timezone to match naive property dates
+                cutoff_datetime = (datetime.now(timezone.utc) - timedelta(hours=self.updated_in_past_hours)).replace(tzinfo=None)
+                date_range = {'type': 'since', 'date': cutoff_datetime}
+            else:
+                return True
+
+            return self._is_datetime_in_range(last_date, date_range)
+
+        # Check for PENDING date filters
+        if (self.listing_type == ListingType.PENDING and
+                (self.last_x_days or self.past_hours or self.date_from) and
+                self.sort_by == "pending_date"):
+
+            if not first_page:
+                return False
+
+            last_property = first_page[-1]
+            last_date = self._extract_date_from_home(last_property, 'pending_date')
+
+            if not last_date:
+                return True
+
+            # Build date range for pending date filter
+            date_range = self._get_date_range()
+            if not date_range:
+                return True
+
+            return self._is_datetime_in_range(last_date, date_range)
+
+        # No optimization applicable, continue pagination
+        return True
+
     def _apply_sort(self, homes):
         """Apply client-side sorting to ensure results are properly ordered.
 
@@ -862,6 +953,8 @@ class RealtorScraper(Scraper):
 
         def get_sort_key(home):
             """Extract the sort field value from a home (handles both dict and Property object)."""
+            from datetime import datetime
+
             if isinstance(home, dict):
                 value = home.get(self.sort_by)
             else:
@@ -877,20 +970,23 @@ class RealtorScraper(Scraper):
             if self.sort_by in ['list_date', 'sold_date', 'pending_date', 'last_update_date']:
                 if isinstance(value, str):
                     try:
-                        from datetime import datetime
                         # Handle timezone indicators
                         date_value = value
                         if date_value.endswith('Z'):
                             date_value = date_value[:-1] + '+00:00'
                         parsed_date = datetime.fromisoformat(date_value)
-                        return (0, parsed_date)
+                        # Normalize to timezone-naive for consistent comparison
+                        return 0, parsed_date.replace(tzinfo=None)
                     except (ValueError, AttributeError):
                         # If parsing fails, treat as None
                         return (1, 0) if self.sort_direction == "desc" else (1, float('inf'))
-                return (0, value)
+                # Handle datetime objects directly (normalize timezone)
+                if isinstance(value, datetime):
+                    return 0, value.replace(tzinfo=None)
+                return 0, value
 
             # For numeric fields, ensure we can compare
-            return (0, value)
+            return 0, value
 
         # Sort the homes
         reverse = (self.sort_direction == "desc")
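The `(0, value)` / `(1, sentinel)` tuples in this hunk are the usual trick for sorting mixed data: the leading element buckets parseable values away from unparseable ones, so Python never compares a datetime against a number. A small standalone illustration of the bucketing idea (not repository code):

```python
from datetime import datetime

rows = ["2025-01-20T10:00:00", None, "2025-01-22T08:15:00", "not-a-date"]

def sort_key(value):
    # Bucket 0: values that parse as datetimes; bucket 1: everything else.
    # Tuples compare element-by-element, so the two buckets never mix.
    try:
        return (0, datetime.fromisoformat(value))
    except (TypeError, ValueError):
        return (1, str(value))

print(sorted(rows, key=sort_key))
# -> parseable dates first (oldest to newest), then the unparseable values
```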
@@ -331,15 +331,26 @@ def validate_sort(sort_by: str | None, sort_direction: str | None = "desc") -> N
|
|||||||
|
|
||||||
def convert_to_datetime_string(value) -> str | None:
|
def convert_to_datetime_string(value) -> str | None:
|
||||||
"""
|
"""
|
||||||
Convert datetime object or string to ISO 8601 string format.
|
Convert datetime object or string to ISO 8601 string format with UTC timezone.
|
||||||
|
|
||||||
Accepts:
|
Accepts:
|
||||||
- datetime.datetime objects
|
- datetime.datetime objects (naive or timezone-aware)
|
||||||
- datetime.date objects
|
- Naive datetimes are treated as local time and converted to UTC
|
||||||
|
- Timezone-aware datetimes are converted to UTC
|
||||||
|
- datetime.date objects (treated as midnight UTC)
|
||||||
- ISO 8601 strings (returned as-is)
|
- ISO 8601 strings (returned as-is)
|
||||||
- None (returns None)
|
- None (returns None)
|
||||||
|
|
||||||
Returns ISO 8601 formatted string or None.
|
Returns ISO 8601 formatted string with UTC timezone or None.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
>>> # Naive datetime (treated as local time)
|
||||||
|
>>> convert_to_datetime_string(datetime(2025, 1, 20, 14, 30))
|
||||||
|
'2025-01-20T22:30:00+00:00' # Assuming PST (UTC-8)
|
||||||
|
|
||||||
|
>>> # Timezone-aware datetime
|
||||||
|
>>> convert_to_datetime_string(datetime(2025, 1, 20, 14, 30, tzinfo=timezone.utc))
|
||||||
|
'2025-01-20T14:30:00+00:00'
|
||||||
"""
|
"""
|
||||||
if value is None:
|
if value is None:
|
||||||
return None
|
return None
|
||||||
@@ -349,13 +360,23 @@ def convert_to_datetime_string(value) -> str | None:
         return value
 
     # datetime.datetime object
-    from datetime import datetime, date
+    from datetime import datetime, date, timezone
     if isinstance(value, datetime):
-        return value.isoformat()
+        # Handle naive datetime - treat as local time and convert to UTC
+        if value.tzinfo is None:
+            # Convert naive datetime to aware local time, then to UTC
+            local_aware = value.astimezone()
+            utc_aware = local_aware.astimezone(timezone.utc)
+            return utc_aware.isoformat()
+        else:
+            # Already timezone-aware, convert to UTC
+            utc_aware = value.astimezone(timezone.utc)
+            return utc_aware.isoformat()
 
-    # datetime.date object (convert to datetime at midnight)
+    # datetime.date object (convert to datetime at midnight UTC)
     if isinstance(value, date):
-        return datetime.combine(value, datetime.min.time()).isoformat()
+        utc_datetime = datetime.combine(value, datetime.min.time()).replace(tzinfo=timezone.utc)
+        return utc_datetime.isoformat()
 
     raise ValueError(
         f"Invalid datetime value. Expected datetime object, date object, or ISO 8601 string. "
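A quick way to sanity-check the branches in this hunk is to push each accepted input shape through the function; the naive-datetime result depends on the machine's local timezone. A sketch under stated assumptions (the `homeharvest.utils` import path is an assumption, not confirmed by the diff):

```python
from datetime import date, datetime, timezone
from homeharvest.utils import convert_to_datetime_string  # assumed module path

# Naive datetime: read as local time, emitted with a +00:00 offset
print(convert_to_datetime_string(datetime(2025, 1, 20, 14, 30)))

# Aware datetime: converted to UTC -> '2025-01-20T14:30:00+00:00'
print(convert_to_datetime_string(datetime(2025, 1, 20, 14, 30, tzinfo=timezone.utc)))

# date: midnight UTC -> '2025-01-20T00:00:00+00:00'
print(convert_to_datetime_string(date(2025, 1, 20)))

# ISO 8601 strings pass through unchanged
print(convert_to_datetime_string("2025-01-20T14:30:00Z"))
```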
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "homeharvest"
-version = "0.8.1"
+version = "0.8.4"
 description = "Real estate scraping library"
 authors = ["Zachary Hampton <zachary@bunsly.com>", "Cullen Watson <cullen@bunsly.com>"]
 homepage = "https://github.com/ZacharyHampton/HomeHarvest"
@@ -1,3 +1,5 @@
+import pytz
+
 from homeharvest import scrape_property, Property
 import pandas as pd
 
@@ -1357,4 +1359,238 @@ def test_combined_filters_with_raw_data():
         mls_id = source.get('id') if source else None
 
         assert mls_id is not None and mls_id != "", \
             f"Property {prop.get('property_id')} should have an MLS ID (source.id)"
+
+
+def test_updated_since_filtering():
+    """Test the updated_since parameter for filtering by last_update_date"""
+    from datetime import datetime, timedelta
+
+    # Test 1: Filter by last update in past 10 minutes (user's example)
+    cutoff_time = datetime.now() - timedelta(minutes=10)
+    result_10min = scrape_property(
+        location="California",
+        updated_since=cutoff_time,
+        sort_by="last_update_date",
+        sort_direction="desc",
+        limit=100
+    )
+
+    assert result_10min is not None
+    print(f"\n10-minute window returned {len(result_10min)} properties")
+
+    # Test 2: Verify all results have last_update_date within range
+    if len(result_10min) > 0:
+        for idx in range(min(10, len(result_10min))):
+            update_date_str = result_10min.iloc[idx]["last_update_date"]
+            if pd.notna(update_date_str):
+                try:
+                    # Handle timezone-aware datetime strings
+                    date_str = str(update_date_str)
+                    if '+' in date_str or date_str.endswith('Z'):
+                        # Remove timezone for comparison with naive cutoff_time
+                        date_str = date_str.replace('+00:00', '').replace('Z', '')
+                    update_date = datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S")
+
+                    assert update_date >= cutoff_time, \
+                        f"Property last_update_date {update_date} should be >= {cutoff_time}"
+                    print(f"Property {idx}: last_update_date = {update_date} (valid)")
+                except (ValueError, TypeError) as e:
+                    print(f"Warning: Could not parse date {update_date_str}: {e}")
+
+    # Test 3: Compare different time windows
+    result_1hour = scrape_property(
+        location="California",
+        updated_since=datetime.now() - timedelta(hours=1),
+        limit=50
+    )
+
+    result_24hours = scrape_property(
+        location="California",
+        updated_since=datetime.now() - timedelta(hours=24),
+        limit=50
+    )
+
+    print(f"1-hour window: {len(result_1hour)} properties")
+    print(f"24-hour window: {len(result_24hours)} properties")
+
+    # Longer time window should return same or more results
+    if len(result_1hour) > 0 and len(result_24hours) > 0:
+        assert len(result_1hour) <= len(result_24hours), \
+            "1-hour filter should return <= 24-hour results"
+
+    # Test 4: Verify sorting works with filtering
+    if len(result_10min) > 1:
+        # Get non-null dates
+        dates = []
+        for idx in range(len(result_10min)):
+            date_str = result_10min.iloc[idx]["last_update_date"]
+            if pd.notna(date_str):
+                try:
+                    # Handle timezone-aware datetime strings
+                    clean_date_str = str(date_str)
+                    if '+' in clean_date_str or clean_date_str.endswith('Z'):
+                        clean_date_str = clean_date_str.replace('+00:00', '').replace('Z', '')
+                    dates.append(datetime.strptime(clean_date_str, "%Y-%m-%d %H:%M:%S"))
+                except (ValueError, TypeError):
+                    pass
+
+        if len(dates) > 1:
+            # Check if sorted descending
+            for i in range(len(dates) - 1):
+                assert dates[i] >= dates[i + 1], \
+                    f"Results should be sorted by last_update_date descending: {dates[i]} >= {dates[i+1]}"
+
+
+def test_updated_since_optimization():
+    """Test that updated_since optimization works (auto-sort + early termination)"""
+    from datetime import datetime, timedelta
+    import time
+
+    # Test 1: Verify auto-sort is applied when using updated_since without explicit sort
+    start_time = time.time()
+    result = scrape_property(
+        location="California",
+        updated_since=datetime.now() - timedelta(minutes=5),
+        # NO sort_by specified - should auto-apply sort_by="last_update_date"
+        limit=50
+    )
+    elapsed_time = time.time() - start_time
+
+    print(f"\nAuto-sort test: {len(result)} properties in {elapsed_time:.2f}s")
+
+    # Should complete quickly due to early termination optimization (<5 seconds)
+    assert elapsed_time < 5.0, f"Query should be fast with optimization, took {elapsed_time:.2f}s"
+
+    # Verify results are sorted by last_update_date (proving auto-sort worked)
+    if len(result) > 1:
+        dates = []
+        for idx in range(min(10, len(result))):
+            date_str = result.iloc[idx]["last_update_date"]
+            if pd.notna(date_str):
+                try:
+                    clean_date_str = str(date_str)
+                    if '+' in clean_date_str or clean_date_str.endswith('Z'):
+                        clean_date_str = clean_date_str.replace('+00:00', '').replace('Z', '')
+                    dates.append(datetime.strptime(clean_date_str, "%Y-%m-%d %H:%M:%S"))
+                except (ValueError, TypeError):
+                    pass
+
+        if len(dates) > 1:
+            # Verify descending order (most recent first)
+            for i in range(len(dates) - 1):
+                assert dates[i] >= dates[i + 1], \
+                    "Auto-applied sort should order by last_update_date descending"
+
+    print("Auto-sort optimization verified ✓")
+
+
+def test_pending_date_optimization():
+    """Test that PENDING + date filters get auto-sort and early termination"""
+    from datetime import datetime, timedelta
+    import time
+
+    # Test: Verify auto-sort is applied for PENDING with past_days
+    start_time = time.time()
+    result = scrape_property(
+        location="California",
+        listing_type="pending",
+        past_days=7,
+        # NO sort_by specified - should auto-apply sort_by="pending_date"
+        limit=50
+    )
+    elapsed_time = time.time() - start_time
+
+    print(f"\nPENDING auto-sort test: {len(result)} properties in {elapsed_time:.2f}s")
+
+    # Should complete quickly due to optimization (<10 seconds)
+    assert elapsed_time < 10.0, f"PENDING query should be fast with optimization, took {elapsed_time:.2f}s"
+
+    # Verify results are sorted by pending_date (proving auto-sort worked)
+    if len(result) > 1:
+        dates = []
+        for idx in range(min(10, len(result))):
+            date_str = result.iloc[idx]["pending_date"]
+            if pd.notna(date_str):
+                try:
+                    clean_date_str = str(date_str)
+                    if '+' in clean_date_str or clean_date_str.endswith('Z'):
+                        clean_date_str = clean_date_str.replace('+00:00', '').replace('Z', '')
+                    dates.append(datetime.strptime(clean_date_str, "%Y-%m-%d %H:%M:%S"))
+                except (ValueError, TypeError):
+                    pass
+
+        if len(dates) > 1:
+            # Verify descending order (most recent first)
+            for i in range(len(dates) - 1):
+                assert dates[i] >= dates[i + 1], \
+                    "PENDING auto-applied sort should order by pending_date descending"
+
+    print("PENDING optimization verified ✓")
+
+
+def test_basic_last_update_date():
+    from datetime import datetime, timedelta
+
+    # Test with naive datetime (treated as local time)
+    now = datetime.now()
+
+    properties = scrape_property(
+        "California",
+        updated_since=now - timedelta(minutes=10),
+        sort_by="last_update_date",
+        sort_direction="desc"
+    )
+
+    # Convert now to timezone-aware for comparison with UTC dates in DataFrame
+    now_utc = now.astimezone(tz=pytz.timezone("UTC"))
+
+    # Check all last_update_date values are <= now
+    assert (properties["last_update_date"] <= now_utc).all()
+
+    # Verify we got some results
+    assert len(properties) > 0
+
+
+def test_timezone_aware_last_update_date():
+    """Test that timezone-aware datetimes work correctly for updated_since"""
+    from datetime import datetime, timedelta, timezone
+
+    # Test with timezone-aware datetime (explicit UTC)
+    now_utc = datetime.now(timezone.utc)
+
+    properties = scrape_property(
+        "California",
+        updated_since=now_utc - timedelta(minutes=10),
+        sort_by="last_update_date",
+        sort_direction="desc"
+    )
+
+    # Check all last_update_date values are <= now
+    assert (properties["last_update_date"] <= now_utc).all()
+
+    # Verify we got some results
+    assert len(properties) > 0
+
+
+def test_timezone_handling_date_range():
+    """Test timezone handling for date_from and date_to parameters"""
+    from datetime import datetime, timedelta
+
+    # Test with naive datetimes for date range (PENDING properties)
+    now = datetime.now()
+    three_days_ago = now - timedelta(days=3)
+
+    properties = scrape_property(
+        "California",
+        listing_type="pending",
+        date_from=three_days_ago,
+        date_to=now
+    )
+
+    # Verify we got results and they're within the date range
+    if len(properties) > 0:
+        # Convert now to UTC for comparison
+        now_utc = now.astimezone(tz=pytz.timezone("UTC"))
+        assert (properties["pending_date"] <= now_utc).all()