refactor: merge master
commit 4726764482
@@ -120,7 +120,7 @@ def _scrape_single_site(
 def scrape_property(
     location: str,
-    site_name: Union[str, list[str]] = list(_scrapers.keys()),
+    site_name: Union[str, list[str]] = None,
     listing_type: str = "for_sale",
 ) -> pd.DataFrame:
     """
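The only change in this hunk is the site_name default: the eagerly evaluated list(_scrapers.keys()) becomes a None sentinel. The code that resolves that sentinel is outside the lines shown, so the following is only a minimal sketch of the usual idiom, with a stand-in _scrapers registry and a hypothetical _resolve_site_names helper:

from typing import Union

# Stand-in registry; in the real module _scrapers maps site names to scraper classes.
_scrapers = {"realtor.com": None, "zillow": None, "redfin": None}

def _resolve_site_names(site_name: Union[str, list[str], None]) -> list[str]:
    # None-sentinel idiom: fall back to every registered scraper at call time
    # instead of freezing the registry into the default at import time.
    # (Assumed rationale; the resolution code is not part of the hunk above.)
    if site_name is None:
        return list(_scrapers.keys())
    if not isinstance(site_name, list):
        return [site_name]
    return site_name

print(_resolve_site_names(None))      # ['realtor.com', 'zillow', 'redfin']
print(_resolve_site_names("zillow"))  # ['zillow']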
@@ -138,14 +138,12 @@ def scrape_property(
     if not isinstance(site_name, list):
         site_name = [site_name]
 
+    results = []
+
     if len(site_name) == 1:
         final_df = _scrape_single_site(location, site_name[0], listing_type)
-        final_df = final_df.drop_duplicates(
-            subset=["street_address", "city", "unit"], keep="first"
-        )
-        return final_df
-
-    results = []
-    with ThreadPoolExecutor() as executor:
-        futures = {
-            executor.submit(_scrape_single_site, location, s_name, listing_type): s_name
+        results.append(final_df)
+    else:
+        with ThreadPoolExecutor() as executor:
+            futures = {
+                executor.submit(_scrape_single_site, location, s_name, listing_type): s_name
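The multi-site branch hands each site to a worker thread, but the hunk cuts off before the futures dict comprehension closes and before the results are gathered. A self-contained sketch of that submit/as_completed pattern, with a dummy _scrape_single_site and illustrative inputs standing in for the real scraper:

from concurrent.futures import ThreadPoolExecutor, as_completed

import pandas as pd

def _scrape_single_site(location: str, site_name: str, listing_type: str) -> pd.DataFrame:
    # Dummy worker so the sketch runs on its own; the real one performs the scrape.
    return pd.DataFrame([{
        "site": site_name, "street_address": "123 Main St",
        "city": location, "unit": None, "listing_type": listing_type,
    }])

location, listing_type = "Austin, TX", "for_sale"   # illustrative values
site_name = ["realtor.com", "zillow", "redfin"]

results = []
with ThreadPoolExecutor() as executor:
    # Map each future back to the site it was submitted for, mirroring the diff above.
    futures = {
        executor.submit(_scrape_single_site, location, s_name, listing_type): s_name
        for s_name in site_name
    }
    # Collect results as threads finish (assumed to match the lines the hunk truncates).
    for future in as_completed(futures):
        results.append(future.result())

final_df = pd.concat(results, ignore_index=True)
print(final_df)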
@@ -158,8 +156,7 @@ def scrape_property(
     if not results:
         return pd.DataFrame()
 
     final_df = pd.concat(results, ignore_index=True)
-    final_df = final_df.drop_duplicates(
-        subset=["street_address", "city", "unit"], keep="first"
-    )
+    final_df = final_df.drop_duplicates(subset=["street_address", "city", "unit"], keep="first")
     return final_df
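The last hunk only collapses the final drop_duplicates call onto one line; either form keeps the first row seen for each street_address/city/unit combination. A small worked example of that call (sample rows are illustrative):

import pandas as pd

final_df = pd.DataFrame([
    {"street_address": "1 Elm St",  "city": "Austin", "unit": None, "site": "realtor.com"},
    {"street_address": "1 Elm St",  "city": "Austin", "unit": None, "site": "zillow"},
    {"street_address": "2 Oak Ave", "city": "Austin", "unit": "4B", "site": "redfin"},
])

# keep="first" retains the first listing per address/city/unit triple, so the
# second "1 Elm St" row (same address and city, missing unit) is dropped as a duplicate.
final_df = final_df.drop_duplicates(subset=["street_address", "city", "unit"], keep="first")
print(final_df)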