Merge pull request #242 from dopplershift/client-updates
HTTP Client updates
jrleeman authored Aug 13, 2018
2 parents f057105 + f9a44c3 commit 6771749
Showing 8 changed files with 100 additions and 55 deletions.
1 change: 1 addition & 0 deletions environment.yml
@@ -12,6 +12,7 @@ dependencies:
- jupyter
- sphinx
- sphinx-gallery
- pillow
- doc8
- pytest
- pytest-cov
7 changes: 7 additions & 0 deletions examples/Basic_Usage.py
@@ -16,7 +16,14 @@
from __future__ import print_function

from siphon.catalog import TDSCatalog
from siphon.http_util import session_manager

###########################################
cat = TDSCatalog('http://thredds.ucar.edu/thredds/catalog.xml')
print(list(cat.catalog_refs))

###########################################
# Basic HTTP authentication can also be used via the HTTP session manager
# by setting default options for HTTP sessions
session_manager.set_session_options(auth=('username', 'password'))
cat = TDSCatalog('https://rda.ucar.edu/thredds/catalog.xml')
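
For reference, a minimal end-to-end sketch of the workflow this example enables, assuming the post-merge siphon.http_util API added in this commit; the credentials are placeholders, not real RDA credentials:

from siphon.catalog import TDSCatalog
from siphon.http_util import session_manager

# Set default options once; every session Siphon creates afterwards reuses them.
session_manager.set_session_options(auth=('username', 'password'))  # placeholder credentials

# Catalog access now goes through an authenticated requests.Session.
cat = TDSCatalog('https://rda.ucar.edu/thredds/catalog.xml')
print(list(cat.catalog_refs)[:5])

# Clear the stored options so later requests go out unauthenticated.
session_manager.set_session_options()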
8 changes: 4 additions & 4 deletions siphon/catalog.py
@@ -18,7 +18,7 @@
# Python 3
from urllib.parse import urljoin, urlparse

from .http_util import create_http_session, urlopen
from .http_util import session_manager
from .metadata import TDSCatalogMetadata

logging.basicConfig(level=logging.ERROR)
@@ -250,7 +250,7 @@ def __init__(self, catalog_url):
The URL of a THREDDS client catalog
"""
session = create_http_session()
session = session_manager.create_session()

# get catalog.xml file
resp = session.get(catalog_url)
@@ -488,7 +488,7 @@ def resolve_url(self, catalog_url):
if catalog_url != '':
resolver_base = catalog_url.split('catalog.xml')[0]
resolver_url = resolver_base + self.url_path
resolver_xml = urlopen(resolver_url)
resolver_xml = session_manager.urlopen(resolver_url)
tree = ET.parse(resolver_xml)
root = tree.getroot()
if 'name' in root.attrib:
@@ -682,7 +682,7 @@ def access_with_service(self, service):
from .ncss import NCSS
provider = NCSS
elif service == 'HTTPServer':
provider = urlopen
provider = session_manager.urlopen
else:
raise ValueError(service + ' is not an access method supported by Siphon')

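
As a sketch of the effect of this change, 'HTTPServer' access on a dataset now resolves to session_manager.urlopen, so the object handed back is an in-memory file-like wrapper around the downloaded content. The catalog URL and dataset choice below are illustrative only:

from siphon.catalog import TDSCatalog

# Illustrative catalog; any TDS dataset exposing the HTTPServer service behaves the same.
cat = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
                 'Global_0p5deg/catalog.xml')
ds = list(cat.datasets.values())[0]

# Routed through session_manager.urlopen: downloads the file and wraps it in BytesIO.
fobj = ds.access_with_service('HTTPServer')
print(fobj.read(4))  # first few bytes of the downloaded file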
4 changes: 2 additions & 2 deletions siphon/cdmr/tests/test_ncstream.py
@@ -29,8 +29,8 @@ def get_test_latest_url(query=None):
@recorder.use_cassette('latest_rap_ncstream_header')
def get_header_remote():
"""Get a header from a remote data source."""
from siphon.http_util import urlopen
return urlopen(get_test_latest_url('req=header'))
from siphon.http_util import session_manager
return session_manager.urlopen(get_test_latest_url('req=header'))


@pytest.mark.parametrize('src, result', [(b'\xb6\xe0\x02', 45110), (b'\x17\n\x0b', 23)])
100 changes: 62 additions & 38 deletions siphon/http_util.py
@@ -18,8 +18,6 @@

from . import __version__

user_agent = 'Siphon ({})'.format(__version__)

HTTPError = requests.HTTPError


@@ -49,49 +47,74 @@ def dst(self, dt): # pylint:disable=unused-argument
utc = UTC()


-def create_http_session():
-    """Create a new HTTP session with our user-agent set.
-
-    Returns
-    -------
-    session : requests.Session
-        The created session
-
-    See Also
-    --------
-    urlopen
-
-    """
-    ret = requests.Session()
-    ret.headers['User-Agent'] = user_agent
-    return ret
-
-
-def urlopen(url, **kwargs):
-    """GET a file-like object for a URL using HTTP.
-
-    This is a thin wrapper around :meth:`requests.Session.get` that returns a file-like object
-    wrapped around the resulting content.
-
-    Parameters
-    ----------
-    url : str
-        The URL to request
-
-    kwargs : arbitrary keyword arguments
-        Additional keyword arguments to pass to :meth:`requests.Session.get`.
-
-    Returns
-    -------
-    fobj : file-like object
-        A file-like interface to the content in the response
-
-    See Also
-    --------
-    :meth:`requests.Session.get`
-
-    """
-    return BytesIO(create_http_session().get(url, **kwargs).content)
+class HTTPSessionManager(object):
+    """Manage the creation of sessions for HTTP access."""
+
+    def __init__(self):
+        """Initialize ``HTTPSessionManager``."""
+        self.user_agent = 'Siphon ({})'.format(__version__)
+        self.options = {}
+
+    def set_session_options(self, **kwargs):
+        """Set options for created session instances.
+
+        Takes keyword arguments and sets them as attributes on the returned
+        :class:`requests.Session` instance.
+
+        See Also
+        --------
+        create_session
+
+        """
+        self.options = kwargs
+
+    def create_session(self):
+        """Create a new HTTP session with our user-agent set.
+
+        Returns
+        -------
+        session : requests.Session
+            The created session
+
+        See Also
+        --------
+        urlopen, set_session_options
+
+        """
+        ret = requests.Session()
+        ret.headers['User-Agent'] = self.user_agent
+        for k, v in self.options.items():
+            setattr(ret, k, v)
+        return ret
+
+    def urlopen(self, url, **kwargs):
+        """GET a file-like object for a URL using HTTP.
+
+        This is a thin wrapper around :meth:`requests.Session.get` that returns a file-like
+        object wrapped around the resulting content.
+
+        Parameters
+        ----------
+        url : str
+            The URL to request
+
+        kwargs : arbitrary keyword arguments
+            Additional keyword arguments to pass to :meth:`requests.Session.get`.
+
+        Returns
+        -------
+        fobj : file-like object
+            A file-like interface to the content in the response
+
+        See Also
+        --------
+        :meth:`requests.Session.get`
+
+        """
+        return BytesIO(self.create_session().get(url, **kwargs).content)
+
+
+session_manager = HTTPSessionManager()


def parse_iso_date(s):
@@ -352,7 +375,7 @@ def __init__(self, url):
"""
self._base = url
self._session = create_http_session()
self._session = session_manager.create_session()
self._get_metadata()

def get_query(self, query):
@@ -458,9 +481,10 @@ def get(self, path, params=None):
text = resp.reason
else:
text = resp.text
raise requests.HTTPError('Error accessing {0}: {1:d} {2}'.format(resp.request.url,
resp.status_code,
text))
raise requests.HTTPError('Error accessing {0}\n'
'Server Error ({1:d}: {2})'.format(resp.request.url,
resp.status_code,
text))
return resp

def _get_metadata(self):
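
A brief usage sketch of the new session manager, assuming the class exactly as added above; the auth values and the version string in the comment are placeholders:

from siphon.http_util import session_manager

# session_manager is a module-level singleton, so options set here apply to every
# session Siphon creates until they are reset.
session_manager.set_session_options(auth=('user', 'pass'))  # placeholder credentials

session = session_manager.create_session()
print(session.headers['User-Agent'])  # e.g. 'Siphon (0.8.0)' -- version shown is illustrative
print(session.auth)                   # ('user', 'pass')

# urlopen builds a session with the same options and returns the body wrapped in BytesIO.
fobj = session_manager.urlopen('http://thredds.ucar.edu/thredds/catalog.xml')
print(fobj.read(5))                   # b'<?xml'

# Reset to defaults so later requests are unaffected.
session_manager.set_session_options()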
5 changes: 3 additions & 2 deletions siphon/simplewebservice/acis.py
@@ -5,7 +5,7 @@

import requests

from ..http_util import create_http_session
from ..http_util import session_manager


def acis_request(method, params):
@@ -51,7 +51,8 @@ def acis_request(method, params):
timeout = 300 if method == 'MultiStnData' else 60

try:
response = create_http_session().post(base_url + method, json=params, timeout=timeout)
response = session_manager.create_session().post(base_url + method, json=params,
timeout=timeout)
return response.json()
except requests.exceptions.Timeout:
raise AcisApiException('Connection Timeout')
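
For context, acis_request is typically called as in the sketch below; the 'StnMeta' method and the parameter values are illustrative and are not exercised against the live ACIS service here:

from siphon.simplewebservice.acis import acis_request

# Example ACIS metadata query; the station id and requested fields are illustrative.
params = {'sids': 'KDEN', 'meta': 'name,state,ll'}
data = acis_request('StnMeta', params)
print(data)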
22 changes: 17 additions & 5 deletions siphon/tests/test_http_util.py
@@ -7,8 +7,8 @@

import pytest

from siphon.http_util import (create_http_session, DataQuery, HTTPEndPoint, HTTPError,
parse_iso_date, urlopen, utc)
from siphon.http_util import (DataQuery, HTTPEndPoint, HTTPError,
parse_iso_date, session_manager, utc)
import siphon.testing

recorder = siphon.testing.get_recorder(__file__)
@@ -17,18 +17,30 @@
@recorder.use_cassette('top_thredds_catalog')
def test_urlopen():
"""Test siphon's urlopen wrapper."""
fobj = urlopen('http://thredds-test.unidata.ucar.edu/thredds/catalog.xml')
fobj = session_manager.urlopen('http://thredds-test.unidata.ucar.edu/thredds/catalog.xml')
assert fobj.read(2) == b'<?'


@recorder.use_cassette('top_thredds_catalog')
def test_session():
"""Test that https sessions contain the proper user agent."""
session = create_http_session()
"""Test that http sessions contain the proper user agent."""
session = session_manager.create_session()
resp = session.get('http://thredds-test.unidata.ucar.edu/thredds/catalog.xml')
assert resp.request.headers['user-agent'].startswith('Siphon')


@recorder.use_cassette('top_thredds_catalog')
def test_session_options():
"""Test that http sessions receive proper options."""
auth = ('foo', 'bar')
session_manager.set_session_options(auth=auth)
try:
session = session_manager.create_session()
assert session.auth == auth
finally:
session_manager.set_session_options()


def test_parse_iso():
"""Test parsing ISO-formatted dates."""
parsed = parse_iso_date('2015-06-15T12:00:00Z')
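
Because session_manager is shared module-level state, one way (not part of this PR) to guarantee the reset that the try/finally above performs is a small pytest fixture, sketched here with a hypothetical fixture name:

import pytest

from siphon.http_util import session_manager


@pytest.fixture
def clean_session_options():
    """Hypothetical fixture: always clear session options after a test."""
    yield session_manager
    session_manager.set_session_options()


def test_session_auth(clean_session_options):
    clean_session_options.set_session_options(auth=('foo', 'bar'))
    assert clean_session_options.create_session().auth == ('foo', 'bar')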
8 changes: 4 additions & 4 deletions siphon/tests/test_ncss_dataset.py
@@ -6,7 +6,7 @@
import logging
import xml.etree.ElementTree as ET

from siphon.http_util import urlopen
from siphon.http_util import session_manager
from siphon.ncss_dataset import _Types, NCSSDataset
from siphon.testing import get_recorder

@@ -559,7 +559,7 @@ def test_dataset_elements_full_ncss_station():
"""Test parsing the dataset from a full ncss station page."""
url = ('http://thredds.ucar.edu/thredds/ncss/nws/synoptic/'
'ncdecoded/Surface_Synoptic_Point_Data_fc.cdmr/dataset.xml')
element = ET.fromstring(urlopen(url).read())
element = ET.fromstring(session_manager.urlopen(url).read())
parsed = NCSSDataset(element)
assert parsed

@@ -570,7 +570,7 @@ def test_dataset_elements_full_ncss_grid():
url = ('http://thredds.ucar.edu/thredds/ncss/grib/NCEP/GFS/'
'Global_0p5deg/GFS_Global_0p5deg_20150602_0000.grib2/'
'dataset.xml')
element = ET.fromstring(urlopen(url).read())
element = ET.fromstring(session_manager.urlopen(url).read())
parsed = NCSSDataset(element)
assert parsed

@@ -580,6 +580,6 @@ def test_dataset_parsing_tds5(recwarn):
"""Test parsing the dataset from TDS 5."""
url = ('http://thredds-test.unidata.ucar.edu/thredds/ncss/grid/casestudies/irma/model/'
'gfs/GFS_Global_0p5deg_20170903_1200.grib2/dataset.xml')
element = ET.fromstring(urlopen(url).read())
element = ET.fromstring(session_manager.urlopen(url).read())
NCSSDataset(element)
assert len(recwarn) == 0
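
Outside the recorded-cassette tests above, the same fetch-and-parse pattern looks roughly like this; the dated GFS URL is taken from the test and may no longer exist on the live server, and the .variables attribute is shown only as an assumption about NCSSDataset:

import xml.etree.ElementTree as ET

from siphon.http_util import session_manager
from siphon.ncss_dataset import NCSSDataset

url = ('http://thredds.ucar.edu/thredds/ncss/grib/NCEP/GFS/'
       'Global_0p5deg/GFS_Global_0p5deg_20150602_0000.grib2/dataset.xml')

# session_manager.urlopen returns a BytesIO; .read() gives the raw XML bytes.
element = ET.fromstring(session_manager.urlopen(url).read())
dataset = NCSSDataset(element)
print(sorted(dataset.variables))  # assumed attribute: parsed variable names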
