From 0b9248d092922e399ce6878cc39cbef4f472ce20 Mon Sep 17 00:00:00 2001
From: Ben Menchaca
Date: Tue, 8 May 2018 15:09:15 +0000
Subject: [PATCH] Polling for stats now works properly.

---
 icomfort3-scraper/lcc_zone.py |  2 +-
 icomfort3-scraper/session.py  | 23 ++++++++++-------------
 icomfort3-scraper/test.py     |  5 +++--
 3 files changed, 14 insertions(+), 16 deletions(-)

diff --git a/icomfort3-scraper/lcc_zone.py b/icomfort3-scraper/lcc_zone.py
index 17e8278..2b7d28f 100644
--- a/icomfort3-scraper/lcc_zone.py
+++ b/icomfort3-scraper/lcc_zone.py
@@ -7,7 +7,7 @@
 try:
     from urllib.parse import urlencode, urlunsplit
 except ImportError:
-    from urlparse import urlunsplit
+    from urlparse import urlunsplit, urlparse
     from urllib import urlencode
 
 import requests
diff --git a/icomfort3-scraper/session.py b/icomfort3-scraper/session.py
index f5a0cdf..ddb129f 100644
--- a/icomfort3-scraper/session.py
+++ b/icomfort3-scraper/session.py
@@ -8,9 +8,9 @@
 import requests
 
 try:
-    from urllib.parse import urlencode, urlunsplit
+    from urllib.parse import urlencode, urlunsplit, urlparse, parse_qs
 except ImportError:
-    from urlparse import urlunsplit
+    from urlparse import urlunsplit, urlparse, parse_qs
     from urllib import urlencode
 
 import requests
@@ -148,9 +148,6 @@ def request_json(self, url, referer_url=''):
         if referer_url:
             header_dict['Referer'] = referer_url
         response = self.session.get(url, headers=header_dict)
-        print("URL was: %s" % response.request.url)
-        print("Request Headers: %s" % response.request.headers)
-        print("Content Type: %s" % response.headers['content-type'])
         if response.headers['content-type'] == 'text/html; charset=utf-8':
             print("Response is HTML.")
             html_soup = BeautifulSoup(response.content, "lxml")
@@ -175,7 +172,6 @@ def request_json(self, url, referer_url=''):
                 print(response_json)
                 self.login_complete = False
                 return False
-        print(response_json)
         return response_json
 
 
@@ -191,21 +187,22 @@ def __initialize_session(self):
         # machine so that polling can start.
         # First, pull the list of homes
         my_homes_url = IComfort3Session.create_url('Dashboard/MyHomes')
-        my_homes = self.session.get(myhomes_url)
+        my_homes = self.session.get(my_homes_url)
         my_homes_soup = BeautifulSoup(my_homes.content, "lxml")
         home_lists = my_homes_soup.findAll('ul', {'class': 'HomeZones'})
         for home in home_lists:
             self.homes[home.get("data-homeid")] = []
         # Iterate over the list of homes, and pull all of the LCC_ID/Zones
         for home in self.homes.keys():
-            home_zones_query = {'homeID': home_id}
-            home_zones = self.session.get(home_zones_url, params=home_zones_query)
-            home_zones_soup = BeautifulSoup(home_zones.content, "lxml");
-            hrefs = home_zones_soup.findAll('a', href=True)
+            zones_url = IComfort3Session.create_url('Dashboard/GetHomeZones')
+            zones_query = {'homeID': home}
+            zones = self.session.get(zones_url, params=zones_query)
+            zones_soup = BeautifulSoup(zones.content, "lxml");
+            hrefs = zones_soup.findAll('a', href=True)
             for href in hrefs:
                 params = parse_qs(urlparse(href['href']).query)
-                self.homes[home].append((params['lddId'][0], params['zoneId'][0]))
-        print(self.homes)
+                self.homes[home].append((params['lccId'][0],
+                                         params['zoneId'][0]))
         self.initialized = True
         return True
diff --git a/icomfort3-scraper/test.py b/icomfort3-scraper/test.py
index 2b5cf62..b4d8358 100755
--- a/icomfort3-scraper/test.py
+++ b/icomfort3-scraper/test.py
@@ -5,10 +5,11 @@
 from lcc_zone import IComfort3Zone
 
 s = IComfort3Session()
-s.login(icomfort_username, icomfort_password)
+s.login(secrets.icomfort_username, secrets.icomfort_password)
 print("Logged In: ", s.login_complete)
 print("Initialized: ", s.initialized)
 for home in s.homes:
-    for (lcc, zone) in home:
+    lcc_zones = s.homes[home]
+    for (lcc, zone) in lcc_zones:
         z = IComfort3Zone(home, lcc, zone)
         print(z.fetch_update(s))
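Note (not part of the patch): a minimal polling sketch showing how the fixed flow is driven, mirroring test.py above. It assumes the import paths that test.py uses (a session module exporting IComfort3Session alongside lcc_zone) and a local secrets.py providing icomfort_username and icomfort_password; after login(), s.homes maps each home ID to a list of (lccId, zoneId) tuples, which is what the __initialize_session() change above now builds.

    # Hypothetical usage sketch; the module names and secrets.py are assumptions.
    from session import IComfort3Session
    from lcc_zone import IComfort3Zone
    import secrets  # local file defining icomfort_username / icomfort_password

    s = IComfort3Session()
    s.login(secrets.icomfort_username, secrets.icomfort_password)
    # After login, s.homes maps homeId -> [(lccId, zoneId), ...]
    for home in s.homes:
        for (lcc, zone) in s.homes[home]:
            z = IComfort3Zone(home, lcc, zone)
            print(z.fetch_update(s))  # poll the current stats for this zone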