- rename to property

pull/1/head
Zachary Hampton 2023-09-16 10:11:39 -07:00
parent 4764b6bd37
commit a772fe45aa
5 changed files with 10 additions and 10 deletions

View File

@@ -1,6 +1,6 @@
 from .core.scrapers.redfin import RedfinScraper
 from .core.scrapers.realtor import RealtorScraper
-from .core.scrapers.types import ListingType, Home
+from .core.scrapers.types import ListingType, Property
 from .core.scrapers import ScraperInput
 from .exceptions import InvalidSite, InvalidListingType
@@ -15,7 +15,7 @@ def scrape_property(
     location: str,
     site_name: str,
     listing_type: str = "for_sale", #: for_sale, for_rent, sold
-) -> list[Home]: #: eventually, return pandas dataframe
+) -> list[Property]: #: eventually, return pandas dataframe
     if site_name.lower() not in _scrapers:
         raise InvalidSite(f"Provided site, '{site_name}', does not exist.")
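
The public entry point now returns `list[Property]` instead of `list[Home]`. A minimal usage sketch, assuming the package is importable as `homeharvest` (the package name is not shown in this diff) and that `Property` exposes the `url` and `beds` fields seen further down in the commit:

```python
# Sketch only: the package name `homeharvest` is an assumption; the field names
# `url` and `beds` are taken from the Property usage later in this commit.
from homeharvest import scrape_property

properties = scrape_property(
    location="85281",           # any location string the chosen scraper accepts
    site_name="redfin",         # must match a key in the _scrapers mapping
    listing_type="for_sale",    # for_sale, for_rent, sold
)

for prop in properties:         # scrape_property now returns list[Property]
    print(prop.url, prop.beds)
```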

View File

@@ -1,6 +1,6 @@
 from dataclasses import dataclass
 import requests
-from .types import Home, ListingType
+from .types import Property, ListingType
 @dataclass
@@ -21,9 +21,9 @@ class Scraper:
             "https": scraper_input.proxy_url,
         }
-    def search(self) -> list[Home]: ...
+    def search(self) -> list[Property]: ...
     @staticmethod
-    def parse_home(home) -> Home: ...
+    def parse_home(home) -> Property: ...
     def handle_location(self): ...
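
The base `Scraper` class only declares the interface that the rename touches. A rough sketch of what a site-specific subclass looks like after this change; the class name, endpoint, and response shape are invented for illustration, and only the overridden signatures mirror the diff:

```python
# Hypothetical subclass: ExampleScraper, the endpoint URL, and the JSON layout are
# illustrative; self.location and self.proxies are assumed to be set by Scraper.__init__.
import requests

from .. import Scraper
from ..types import Property, Address


class ExampleScraper(Scraper):
    def search(self) -> list[Property]:
        response = requests.get(
            "https://example.com/api/listings",   # hypothetical endpoint
            params={"q": self.location},
            proxies=self.proxies,
        )
        return [self.parse_home(home) for home in response.json()["homes"]]

    @staticmethod
    def parse_home(home: dict) -> Property:
        # Only the fields visible in this commit are populated here.
        return Property(
            address=Address(
                address_one=home.get("street", ""),
                city=home.get("city", ""),
            ),
            url=home.get("url", ""),
            beds=home.get("beds"),
        )
```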

View File

@@ -1,5 +1,5 @@
 import json
-from ..types import Home, Address
+from ..types import Property, Address
 from .. import Scraper
 from typing import Any

View File

@@ -1,5 +1,5 @@
 import json
-from ..types import Home, Address
+from ..types import Property, Address
 from .. import Scraper
 from typing import Any
@@ -20,7 +20,7 @@ class RedfinScraper(Scraper):
         return response_json['payload']['sections'][0]['rows'][0].split('_')[1]
     @staticmethod
-    def parse_home(home: dict) -> Home:
+    def parse_home(home: dict) -> Property:
         address = Address(
             address_one=home['streetLine']['value'],
             city=home['city'],
@@ -34,7 +34,7 @@ class RedfinScraper(Scraper):
            if key in home and 'value' in home[key]:
                return home[key]['value']
-        return Home(
+        return Property(
             address=address,
             url=url,
             beds=home['beds'] if 'beds' in home else None,
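
The Redfin payload is read defensively here: some keys may be absent, and some are wrapped in a `{'value': ...}` object. A standalone sketch of that access pattern; the `get_value` name is assumed, since the diff only shows the helper's body:

```python
# Minimal sketch of the defensive-access pattern above; `get_value` is an assumed
# name for the nested helper whose body appears in the hunk.
def get_value(home: dict, key: str):
    # Redfin wraps some fields as {"value": ...}; unwrap when present.
    if key in home and "value" in home[key]:
        return home[key]["value"]
    return None  # assumed fallback for missing or unwrapped keys


home = {"price": {"value": 450_000}, "beds": 3}
print(get_value(home, "price"))                  # 450000
print(home["beds"] if "beds" in home else None)  # plain keys are checked inline, as above
```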

View File

@@ -19,7 +19,7 @@ class Address:
 @dataclass
-class Home:
+class Property:
     address: Address
     url: str
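
The types module is the anchor of the rename: the `Home` dataclass becomes `Property`. A sketch of the resulting shape; only `address`, `url`, and `beds` are visible in this commit, so the `Optional[int]` annotation and any further fields are assumptions:

```python
# Sketch of the renamed dataclass; fields beyond address, url, and beds are omitted,
# and Optional[int] for beds is inferred from the `else None` fallback in RedfinScraper.
from dataclasses import dataclass
from typing import Optional


@dataclass
class Address:
    address_one: str
    city: str              # the real Address likely carries more fields (state, zip, ...)


@dataclass
class Property:            # formerly `Home`
    address: Address
    url: str
    beds: Optional[int] = None
```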