diff --git a/ApartmentHunter.py b/ApartmentHunter.py
index 3618861..66c3ebc 100644
--- a/ApartmentHunter.py
+++ b/ApartmentHunter.py
@@ -9,7 +9,7 @@ from selenium.webdriver.support.ui import WebDriverWait
 
 #list of Cities
 Cities =["Dallas","Fort Worth","Arlington","Plano","Irving","Denton","Richardson","Grapevine"]
-St="tx"
+state="tx"
 baseurl="https://www.apartments.com/" #baseURL
 search ="/2-bedrooms-under-1500" #search terms
 search_dict = {
@@ -75,13 +75,14 @@ def genSearch(city, state, search_dict):
 
 driver = webdriver.Chrome()
 wait = WebDriverWait(driver, 20)
-url = baseurl+'Dallas'+"-"+St+search
+url = baseurl+'Dallas'+"-"+state+search
 driver.get(url)
 src = driver.page_source
 soup = BeautifulSoup(src, 'html.parser')
 
 for city in Cities:
-    url = baseurl+city+"-"+St+search
+    url= genSearch(city, state, search_dict)
+    #url = baseurl+city+"-"+St+search
     driver.get(url)
     src = driver.page_source
     soup = BeautifulSoup(src, 'html.parser')
@@ -91,7 +92,6 @@ for city in Cities:
     ApartmentBeds = soup.find_all('p', class_='property-beds')
     ApartmentInfo= soup.find_all('div', class_="property-information") #finds all div elements with class of property-information and its children
     ApartmentLink = [] #used to collect all href links
-    print(city, len(Apartment))
     for x in range(0,len(ApartmentInfo)): #go through all the div elements
         ApartmentLink.append(ApartmentInfo[x].find_all('a')[0]['href']) # find all the a elements in each div and appends its href value to ApartmentLink
     for apartment, address, link in zip(Apartment, ApartmentAddr, ApartmentLink):
@@ -99,5 +99,5 @@ for city in Cities:
 
         Name = apartment.contents[0]
         Addr = address.contents[0]
         Web = link
-        #print(Name + ", ", Addr + ", ", Web)
+        print(Name + ", ", Addr + ", ", Web)