
Commit

Polling for stats now works properly.
bmenchaca committed May 8, 2018
1 parent 9bd27b8 commit 0b9248d
Showing 3 changed files with 14 additions and 16 deletions.

icomfort3-scraper/lcc_zone.py (2 changes: 1 addition, 1 deletion)
@@ -7,7 +7,7 @@
 try:
     from urllib.parse import urlencode, urlunsplit
 except ImportError:
-    from urlparse import urlunsplit
+    from urlparse import urlunsplit, urlparse
     from urllib import urlencode
 
 import requests

icomfort3-scraper/session.py (23 changes: 10 additions, 13 deletions)
@@ -8,9 +8,9 @@
 import requests
 
 try:
-    from urllib.parse import urlencode, urlunsplit
+    from urllib.parse import urlencode, urlunsplit, urlparse, parse_qs
 except ImportError:
-    from urlparse import urlunsplit
+    from urlparse import urlunsplit, urlparse, parse_qs
     from urllib import urlencode
 
 import requests
@@ -148,9 +148,6 @@ def request_json(self, url, referer_url=''):
         if referer_url:
             header_dict['Referer'] = referer_url
         response = self.session.get(url, headers=header_dict)
-        print("URL was: %s" % response.request.url)
-        print("Request Headers: %s" % response.request.headers)
-        print("Content Type: %s" % response.headers['content-type'])
         if response.headers['content-type'] == 'text/html; charset=utf-8':
             print("Response is HTML.")
             html_soup = BeautifulSoup(response.content, "lxml")
@@ -175,7 +172,6 @@ def request_json(self, url, referer_url=''):
             print(response_json)
             self.login_complete = False
             return False
-        print(response_json)
         return response_json


@@ -191,21 +187,22 @@ def __initialize_session(self):
         # machine so that polling can start.
         # First, pull the list of homes
         my_homes_url = IComfort3Session.create_url('Dashboard/MyHomes')
-        my_homes = self.session.get(myhomes_url)
+        my_homes = self.session.get(my_homes_url)
         my_homes_soup = BeautifulSoup(my_homes.content, "lxml")
         home_lists = my_homes_soup.findAll('ul', {'class': 'HomeZones'})
         for home in home_lists:
             self.homes[home.get("data-homeid")] = []
         # Iterate over the list of homes, and pull all of the LCC_ID/Zones
         for home in self.homes.keys():
-            home_zones_query = {'homeID': home_id}
-            home_zones = self.session.get(home_zones_url, params=home_zones_query)
-            home_zones_soup = BeautifulSoup(home_zones.content, "lxml");
-            hrefs = home_zones_soup.findAll('a', href=True)
+            zones_url = IComfort3Session.create_url('Dashboard/GetHomeZones')
+            zones_query = {'homeID': home}
+            zones = self.session.get(zones_url, params=zones_query)
+            zones_soup = BeautifulSoup(zones.content, "lxml");
+            hrefs = zones_soup.findAll('a', href=True)
             for href in hrefs:
                 params = parse_qs(urlparse(href['href']).query)
-                self.homes[home].append((params['lddId'][0], params['zoneId'][0]))
-        print(self.homes)
+                self.homes[home].append((params['lccId'][0],
+                                         params['zoneId'][0]))
         self.initialized = True
         return True

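For reference, the core of the __initialize_session fix is pulling the LCC and zone ids out of each zone link's query string, with the old 'lddId' key replaced by 'lccId'. A minimal standalone sketch of that parsing, not part of the commit; the href value below is invented for illustration:

    # Minimal sketch; the href is a made-up example, not taken from the site.
    try:
        from urllib.parse import urlparse, parse_qs   # Python 3
    except ImportError:
        from urlparse import urlparse, parse_qs       # Python 2

    href = '/Dashboard/HomeDetails?lccId=12345&zoneId=1'
    params = parse_qs(urlparse(href).query)            # {'lccId': ['12345'], 'zoneId': ['1']}
    print(params['lccId'][0], params['zoneId'][0])      # prints: 12345 1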

icomfort3-scraper/test.py (5 changes: 3 additions, 2 deletions)
@@ -5,10 +5,11 @@
 from lcc_zone import IComfort3Zone
 
 s = IComfort3Session()
-s.login(icomfort_username, icomfort_password)
+s.login(secrets.icomfort_username, secrets.icomfort_password)
 print("Logged In: ", s.login_complete)
 print("Initialized: ", s.initialized)
 for home in s.homes:
-    for (lcc, zone) in home:
+    lcc_zones = s.homes[home]
+    for (lcc, zone) in lcc_zones:
         z = IComfort3Zone(home, lcc, zone)
         print(z.fetch_update(s))

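For context on the test.py change: s.homes maps each home id to the list of (lccId, zoneId) tuples collected in __initialize_session, so iterating the dict yields home ids and the pairs have to be looked up via s.homes[home] rather than unpacked from the key itself. A minimal sketch with invented ids; the structure is inferred from the diff above, not quoted from the repository:

    # Hypothetical contents of s.homes after initialization; all ids invented.
    homes = {'123456': [('17A00001', '1'), ('17A00001', '2')]}
    for home in homes:                    # iterates home ids (dict keys)
        for (lcc, zone) in homes[home]:   # each (lccId, zoneId) pair
            print(home, lcc, zone)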