Mirror of https://github.com/Bunsly/HomeHarvest.git
Synced 2026-03-05 03:54:29 -08:00

Compare commits — 6 commits:
8a6ac96db4, 129ab37dff, 9a0cac650e, a1c1bcc822, 6f3faceb27, cab0216f29
homeharvest/core/scrapers/__init__.py (path inferred from the relative imports):

@@ -2,8 +2,6 @@ from __future__ import annotations
 from typing import Union
 
 import requests
-from requests.adapters import HTTPAdapter
-from urllib3.util.retry import Retry
 import uuid
 from ...exceptions import AuthenticationError
 from .models import Property, ListingType, SiteName, SearchPropertyType, ReturnType
@@ -11,6 +9,27 @@ import json
 from pydantic import BaseModel
 
 
+DEFAULT_HEADERS = {
+    'Content-Type': 'application/json',
+    'Accept': '*/*',
+    'Accept-Language': 'en-US,en;q=0.9',
+    'Cache-Control': 'no-cache',
+    'Origin': 'https://www.realtor.com',
+    'Pragma': 'no-cache',
+    'Referer': 'https://www.realtor.com/',
+    'rdc-client-name': 'RDC_WEB_SRP_FS_PAGE',
+    'rdc-client-version': '3.0.2515',
+    'sec-ch-ua': '"Google Chrome";v="135", "Not-A.Brand";v="8", "Chromium";v="135"',
+    'sec-ch-ua-mobile': '?0',
+    'sec-ch-ua-platform': '"macOS"',
+    'sec-fetch-dest': 'empty',
+    'sec-fetch-mode': 'cors',
+    'sec-fetch-site': 'same-site',
+    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
+    'x-is-bot': 'false',
+}
+
+
 class ScraperInput(BaseModel):
     location: str
     listing_type: ListingType | list[ListingType] | None
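The added DEFAULT_HEADERS constant impersonates desktop Chrome 135 on macOS: the User-Agent, sec-ch-ua client hints, and sec-fetch-* values are mutually consistent, and Origin/Referer point at realtor.com. A minimal sketch of sending a request with these headers, assuming the constant above (the URL is a placeholder, not a call the library necessarily makes):

import requests

# Sketch: any requests call can reuse the shared header set wholesale.
resp = requests.get("https://www.realtor.com/", headers=DEFAULT_HEADERS, timeout=10)
print(resp.status_code)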
@@ -60,8 +79,6 @@ class ScraperInput(BaseModel):
 
 
 class Scraper:
-    session = None
-
     def __init__(
         self,
         scraper_input: ScraperInput,
@@ -69,35 +86,8 @@ class Scraper:
         self.location = scraper_input.location
         self.listing_type = scraper_input.listing_type
         self.property_type = scraper_input.property_type
 
-        if not self.session:
-            Scraper.session = requests.Session()
-            retries = Retry(
-                total=3, backoff_factor=4, status_forcelist=[429], allowed_methods=frozenset(["GET", "POST"])
-            )
-
-            adapter = HTTPAdapter(max_retries=retries, pool_connections=10, pool_maxsize=20)
-            Scraper.session.mount("http://", adapter)
-            Scraper.session.mount("https://", adapter)
-            Scraper.session.headers.update(
-                {
-                    'Content-Type': 'application/json',
-                    'apollographql-client-version': '26.11.1-26.11.1.1106489',
-                    'Accept': '*/*',
-                    'Accept-Language': 'en-US,en;q=0.9',
-                    'rdc-client-version': '26.11.1',
-                    'X-APOLLO-OPERATION-TYPE': 'query',
-                    'X-APOLLO-OPERATION-ID': 'null',
-                    'rdc-client-name': 'RDC_NATIVE_MOBILE-iPhone-com.move.Realtor',
-                    'apollographql-client-name': 'com.move.Realtor-apollo-ios',
-                    'User-Agent': 'Realtor.com/26.11.1.1106489 CFNetwork/3860.200.71 Darwin/25.1.0',
-                }
-            )
-
         self.proxy = scraper_input.proxy
-        if self.proxy:
-            proxies = {"http": self.proxy, "https": self.proxy}
-            self.session.proxies.update(proxies)
-
+        self.proxies = {"http": self.proxy, "https": self.proxy} if self.proxy else None
         self.listing_type = scraper_input.listing_type
         self.radius = scraper_input.radius
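This hunk drops the class-level requests.Session (with its urllib3 Retry adapter and Apollo iOS headers) in favor of stateless per-request calls; the proxy now lives in a plain self.proxies mapping instead of being mounted on a shared session. A sketch of how such a mapping is consumed by requests (the proxy URL is a hypothetical placeholder):

import requests

proxy = "http://user:pass@proxy.example.com:8080"  # hypothetical
proxies = {"http": proxy, "https": proxy} if proxy else None

# requests accepts proxies=None transparently, so the call site needs no branching.
resp = requests.get("https://httpbin.org/ip", proxies=proxies, timeout=10)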
@@ -108,7 +98,7 @@ class Scraper:
         self.date_from_precision = scraper_input.date_from_precision
         self.date_to_precision = scraper_input.date_to_precision
         self.foreclosure = scraper_input.foreclosure
-        self.extra_property_data = scraper_input.extra_property_data
+        self.extra_property_data = False  # TODO: temporarily disabled
         self.exclude_pending = scraper_input.exclude_pending
         self.limit = scraper_input.limit
         self.offset = scraper_input.offset
homeharvest/core/scrapers/realtor/__init__.py (path inferred from the imports and module docstring):

@@ -8,27 +8,27 @@ This module implements the scraper for realtor.com
 from __future__ import annotations
 
 import json
-import re
+import requests
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from datetime import datetime
 from json import JSONDecodeError
 from typing import Dict, Union
 
 from tenacity import (
     retry,
     retry_if_exception_type,
+    retry_if_not_exception_type,
     wait_exponential,
     stop_after_attempt,
 )
 
-from .. import Scraper
+from .. import Scraper, DEFAULT_HEADERS
 from ....exceptions import AuthenticationError
 from ..models import (
     Property,
     ListingType,
     ReturnType
 )
-from .queries import GENERAL_RESULTS_QUERY, SEARCH_HOMES_DATA, HOMES_DATA, HOME_FRAGMENT, SEARCH_RESULTS_FRAGMENT, LISTING_PHOTOS_FRAGMENT, MORPHEUS_SUGGESTIONS_QUERY
+from .queries import GENERAL_RESULTS_QUERY, HOMES_DATA, SEARCH_SUGGESTIONS_QUERY
 from .processors import (
     process_property,
     process_extra_property_details,
@@ -37,7 +37,7 @@ from .processors import (
 
 
 class RealtorScraper(Scraper):
-    SEARCH_GQL_URL = "https://api.frontdoor.realtor.com/graphql"
+    SEARCH_GQL_URL = "https://www.realtor.com/frontdoor/graphql"
     NUM_PROPERTY_WORKERS = 20
     DEFAULT_PAGE_SIZE = 200
 
@@ -52,26 +52,28 @@ class RealtorScraper(Scraper):
 
     def _graphql_post(self, query: str, variables: dict, operation_name: str) -> dict:
         """
-        Execute a GraphQL query with operation-specific headers.
+        Execute a GraphQL query.
 
         Args:
             query: GraphQL query string (must include operationName matching operation_name param)
             variables: Query variables dictionary
-            operation_name: Name of the GraphQL operation for Apollo headers
+            operation_name: Name of the GraphQL operation
 
         Returns:
             Response JSON dictionary
         """
-        # Set operation-specific header (must match query's operationName)
-        self.session.headers['X-APOLLO-OPERATION-NAME'] = operation_name
-
         payload = {
-            "operationName": operation_name,  # Include in payload
+            "operationName": operation_name,
             "query": self._minify_query(query),
             "variables": variables,
         }
 
-        response = self.session.post(self.SEARCH_GQL_URL, data=json.dumps(payload, separators=(',', ':')))
+        response = requests.post(
+            self.SEARCH_GQL_URL,
+            headers=DEFAULT_HEADERS,
+            data=json.dumps(payload, separators=(',', ':')),
+            proxies=self.proxies
+        )
 
         if response.status_code == 403:
             if not self.proxy:
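_graphql_post now sends the payload through a bare requests.post with the shared DEFAULT_HEADERS and the instance's proxies mapping. The separators argument strips the whitespace json.dumps inserts by default, producing a compact body, e.g.:

import json

payload = {"operationName": "GetHomeSearch", "variables": {"offset": 0}}
print(json.dumps(payload))
# {"operationName": "GetHomeSearch", "variables": {"offset": 0}}
print(json.dumps(payload, separators=(',', ':')))
# {"operationName":"GetHomeSearch","variables":{"offset":0}}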
@@ -96,7 +98,7 @@ class RealtorScraper(Scraper):
             }
         }
 
-        response_json = self._graphql_post(MORPHEUS_SUGGESTIONS_QUERY, variables, "GetMorpheusSuggestions")
+        response_json = self._graphql_post(SEARCH_SUGGESTIONS_QUERY, variables, "Search_suggestions")
 
         if (
             response_json is None
@@ -128,6 +130,11 @@ class RealtorScraper(Scraper):
             }
 
             if geo.get("area_type") == "address":
+                # Try to get mpr_id directly from API response first
+                if geo.get("mpr_id"):
+                    result["mpr_id"] = geo.get("mpr_id")
+                else:
+                    # Fallback: extract from _id field if it has addr: prefix
-                geo_id = geo.get("_id", "")
-                if geo_id.startswith("addr:"):
-                    result["mpr_id"] = geo_id.replace("addr:", "")
+                    geo_id = geo.get("_id", "")
+                    if geo_id.startswith("addr:"):
+                        result["mpr_id"] = geo_id.replace("addr:", "")
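The new branch prefers the mpr_id field the suggestions API now returns, keeping the old addr: slug parsing only as a fallback. The same logic as a standalone sketch:

def extract_mpr_id(geo: dict) -> str | None:
    """Sketch of the lookup above: prefer the API's mpr_id, else parse the _id slug."""
    if geo.get("mpr_id"):
        return geo["mpr_id"]
    geo_id = geo.get("_id", "")
    if geo_id.startswith("addr:"):
        return geo_id.replace("addr:", "")  # e.g. "addr:1234567890" -> "1234567890"
    return None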
@@ -168,13 +175,10 @@ class RealtorScraper(Scraper):
     def handle_home(self, property_id: str) -> list[Property]:
         """Fetch single home with proper error handling."""
         query = (
-            """%s
-            query GetHomeDetails($property_id: ID!) {
-                home(property_id: $property_id) {
-                    ...HomeDetailsFragment
-                }
+            """query GetHomeDetails($property_id: ID!) {
+                home(property_id: $property_id) %s
             }"""
-            % HOME_FRAGMENT
+            % HOMES_DATA
         )
 
         variables = {"property_id": property_id}
@@ -421,9 +425,7 @@ class RealtorScraper(Scraper):
                         limit: 200
                         offset: $offset
                     ) %s
-                }
-                %s
-                %s""" % (
+                }""" % (
             is_foreclosure,
             status_param,
             date_param,
@@ -432,13 +434,11 @@ class RealtorScraper(Scraper):
             pending_or_contingent_param,
             sort_param,
             GENERAL_RESULTS_QUERY,
-            SEARCH_RESULTS_FRAGMENT,
-            LISTING_PHOTOS_FRAGMENT,
         )
         elif search_type == "area":  #: general search, came from a general location
             query = """query GetHomeSearch(
                 $search_location: SearchLocation,
-                $offset: Int,
+                $offset: Int
             ) {
                 homeSearch: home_search(
                     query: {
@@ -455,9 +455,7 @@ class RealtorScraper(Scraper):
                         limit: 200
                         offset: $offset
                     ) %s
-                }
-                %s
-                %s""" % (
+                }""" % (
             is_foreclosure,
             status_param,
             date_param,
@@ -467,8 +465,6 @@ class RealtorScraper(Scraper):
             bucket_param,
             sort_param,
             GENERAL_RESULTS_QUERY,
-            SEARCH_RESULTS_FRAGMENT,
-            LISTING_PHOTOS_FRAGMENT,
         )
         else:  #: general search, came from an address
             query = (
@@ -483,10 +479,8 @@ class RealtorScraper(Scraper):
                         limit: 1
                         offset: $offset
                     ) %s
-                }
-                %s
-                %s"""
-                % (GENERAL_RESULTS_QUERY, SEARCH_RESULTS_FRAGMENT, LISTING_PHOTOS_FRAGMENT)
+                }"""
+                % GENERAL_RESULTS_QUERY
             )
 
         response_json = self._graphql_post(query, variables, "GetHomeSearch")
@@ -1110,7 +1104,7 @@ class RealtorScraper(Scraper):
 
 
     @retry(
-        retry=retry_if_exception_type((JSONDecodeError, Exception)),
+        retry=retry_if_exception_type((JSONDecodeError, Exception)) & retry_if_not_exception_type(AuthenticationError),
         wait=wait_exponential(multiplier=1, min=1, max=10),
         stop=stop_after_attempt(3),
     )
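tenacity retry predicates compose with the & and | operators, so the decorator now retries transient failures while letting AuthenticationError propagate on the first attempt. A self-contained sketch of the same pattern (the failing function is illustrative):

from json import JSONDecodeError
from tenacity import (
    retry,
    retry_if_exception_type,
    retry_if_not_exception_type,
    stop_after_attempt,
    wait_exponential,
)

class AuthenticationError(Exception):
    """Stand-in for homeharvest's AuthenticationError."""

@retry(
    retry=retry_if_exception_type((JSONDecodeError, Exception))
    & retry_if_not_exception_type(AuthenticationError),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    stop=stop_after_attempt(3),
)
def fetch():
    raise AuthenticationError("bad credentials")  # raised once, never retried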
@@ -1125,16 +1119,14 @@ class RealtorScraper(Scraper):
         property_ids = list(set(property_ids))
 
         fragments = "\n".join(
-            f'home_{property_id}: home(property_id: {property_id}) {{ ...HomeDetailsFragment }}'
+            f'home_{property_id}: home(property_id: {property_id}) {HOMES_DATA}'
             for property_id in property_ids
         )
-        query = f"""{HOME_FRAGMENT}
-
-        query GetHomeDetails {{
+        query = f"""query GetHome {{
            {fragments}
         }}"""
 
-        data = self._graphql_post(query, {}, "GetHomeDetails")
+        data = self._graphql_post(query, {}, "GetHome")
 
         if "data" not in data or data["data"] is None:
             # If we got a 400 error with "Required parameter is missing", raise to trigger retry
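GraphQL aliases let one request fetch many homes: each home_<id> alias keys its own object in the response. A sketch of the string the join above produces (with a stand-in selection set, since the real HOMES_DATA block is much larger):

HOMES_DATA = "{ __typename listing_id }"  # stand-in for the real selection set
property_ids = ["1234", "5678"]           # hypothetical ids

fragments = "\n".join(
    f'home_{property_id}: home(property_id: {property_id}) {HOMES_DATA}'
    for property_id in property_ids
)
query = f"""query GetHome {{
{fragments}
}}"""
# query GetHome {
# home_1234: home(property_id: 1234) { __typename listing_id }
# home_5678: home(property_id: 5678) { __typename listing_id }
# }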
homeharvest/core/scrapers/realtor/queries.py (path inferred from the import above):

@@ -1,5 +1,5 @@
 SEARCH_RESULTS_FRAGMENT = """
-fragment SearchFragment on SearchHome {
+fragment PropertyResult on SearchHome {
     __typename
     pending_date
     listing_id
@@ -371,7 +371,7 @@ _SEARCH_HOMES_DATA_BASE = """{
 
 
 HOME_FRAGMENT = """
-fragment HomeDetailsFragment on Home {
+fragment PropertyResult on Home {
     __typename
     pending_date
     listing_id
@@ -689,12 +689,8 @@ GENERAL_RESULTS_QUERY = """{
         __typename
         count
         total
-        results {
-            __typename
-            ...SearchFragment
-            ...ListingPhotosFragment
-        }
-    }"""
+        results %s
+    }""" % SEARCH_HOMES_DATA
 
 LISTING_PHOTOS_FRAGMENT = """
 fragment ListingPhotosFragment on SearchHome {
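Instead of spreading named fragments (...SearchFragment, ...ListingPhotosFragment) that had to be concatenated onto every query, the results selection set is now spliced in directly with %-interpolation, so GENERAL_RESULTS_QUERY is self-contained. In miniature:

SEARCH_HOMES_DATA = "{ property_id list_price }"  # stand-in for the real block

GENERAL_RESULTS_QUERY = """{
        count
        total
        results %s
    }""" % SEARCH_HOMES_DATA
# -> "{ count total results { property_id list_price } }" (modulo whitespace)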
@@ -712,4 +708,98 @@ fragment ListingPhotosFragment on SearchHome {
 }
 """
 
-MORPHEUS_SUGGESTIONS_QUERY = """query GetMorpheusSuggestions($searchInput: SearchSuggestionsInput!) { search_suggestions(search_input: $searchInput) { __typename geo_results { __typename type text geo { __typename _id _score mpr_id area_type city state_code postal_code country lat lon county counties { __typename name fips state_code } slug_id geo_id score name city_slug_id centroid { __typename lat lon } county_needed_for_uniq street line school school_id school_district school_district_id has_catchment university university_id neighborhood park } } no_matches has_results filter_criteria { __typename property_type { __typename type } price { __typename min max pattern } bed { __typename min max pattern } bath { __typename min max pattern } feature_tags { __typename tags } listing_status { __typename new_construction existing_homes foreclosures recently_sold fifty_five_plus open_house hide_new_construction hide_existing_homes hide_foreclosures hide_recently_sold hide_fifty_five_plus hide_open_house virtual_tour three_d_tour contingent hide_contingent pending hide_pending } keyword { __typename keywords } garage { __typename min max pattern } age { __typename min max pattern } stories { __typename min max pattern } lot_size { __typename min max pattern } square_feet { __typename min max pattern } home_size { __typename min max pattern } basement finished_basement pool waterfront fireplace detached_garage expand { __typename radius } hoa { __typename type fee } } message_data { __typename property_type pool waterfront fireplace basement finished_basement detached_garage listing_status { __typename new_construction existing_homes foreclosures recently_sold fifty_five_plus open_house hide_new_construction hide_existing_homes hide_foreclosures hide_recently_sold hide_fifty_five_plus hide_open_house } keywords price { __typename min max pattern } bed { __typename min max pattern } bath { __typename min max pattern } garage { __typename min max pattern } stories { __typename min max pattern } age { __typename min max pattern } lot_size { __typename min max pattern } square_feet { __typename min max pattern } } original_string morpheus_context } }"""
+SEARCH_SUGGESTIONS_QUERY = """query Search_suggestions($searchInput: SearchSuggestionsInput!) {
+    search_suggestions(search_input: $searchInput) {
+        raw_input_parser_result
+        typeahead_results {
+            display_string
+            display_geo
+            geo {
+                _id
+                _score
+                mpr_id
+                area_type
+                city
+                state_code
+                state
+                postal_code
+                country
+                lat
+                lon
+                county
+                counties {
+                    name
+                    fips
+                    state_code
+                }
+                slug_id
+                geo_id
+                score
+                name
+                city_slug_id
+                centroid {
+                    lat
+                    lon
+                }
+                county_needed_for_uniq
+                street
+                line
+                school
+                school_id
+                school_district
+                has_catchment
+                university
+                university_id
+                neighborhood
+                park
+            }
+            url
+        }
+        geo_results {
+            type
+            text
+            geo {
+                _id
+                _score
+                mpr_id
+                area_type
+                city
+                state_code
+                state
+                postal_code
+                country
+                lat
+                lon
+                county
+                counties {
+                    name
+                    fips
+                    state_code
+                }
+                slug_id
+                geo_id
+                score
+                name
+                city_slug_id
+                centroid {
+                    lat
+                    lon
+                }
+                county_needed_for_uniq
+                street
+                line
+                school
+                school_id
+                school_district
+                has_catchment
+                university
+                university_id
+                neighborhood
+                park
+            }
+        }
+        no_matches
+        has_results
+        original_string
+    }
+}"""
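The replacement query is the multi-line Search_suggestions operation, and the operation name passed to _graphql_post must match it, as the call site in the scraper diff above shows. A hedged usage sketch (the exact SearchSuggestionsInput fields are not shown in this diff, so the variables shape below is an assumption):

variables = {
    "searchInput": {"search_term": "Austin, TX"}  # assumed field name, not confirmed by this diff
}
response_json = scraper._graphql_post(
    SEARCH_SUGGESTIONS_QUERY, variables, "Search_suggestions"
)
geos = response_json["data"]["search_suggestions"]["geo_results"]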
pyproject.toml:

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "homeharvest"
-version = "0.8.12"
+version = "0.8.18"
 description = "Real estate scraping library"
 authors = ["Zachary Hampton <zachary@bunsly.com>", "Cullen Watson <cullen@bunsly.com>"]
 homepage = "https://github.com/ZacharyHampton/HomeHarvest"