Skip to content

Commit

Permalink
Cleaning up base test case
Browse files Browse the repository at this point in the history
  • Loading branch information
Jon Wayne Parrott committed Sep 16, 2015
1 parent 64d99cd commit 1787ef0
Show file tree
Hide file tree
Showing 17 changed files with 102 additions and 104 deletions.
18 changes: 12 additions & 6 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -81,16 +81,22 @@ If you want to run the Google App Engine tests, you will need:

    $ export GAE_PYTHONPATH=<path to your App Engine SDK>

To run the bigquery tests, you'll need to create a bigquery dataset:
To run the BigQuery tests:

* Create a dataset in your project named `test_dataset`.
* Create a table named `test_table2`, upload ``tests/resources/data.csv`` and give it the following schema:

Name STRING
Age INTEGER
Weight FLOAT
IsMagic BOOLEAN
gcloud alpha bigquery datasets create test_dataset

* Load sample data into google cloud storage (for import tests):

gsutil cp tests/resources/data.csv gs://$TEST_BUCKET_NAME/data.csv

* Load the sample data into a table named `test_table` (for export and streaming tests):

gcloud alpha bigquery import \
gs://$TEST_BUCKET_NAME/data.csv \
test_dataset/test_table \
--schema-file tests/resources/schema.json

### Test environments

Expand Down
8 changes: 2 additions & 6 deletions appengine/bigquery/main_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ class TestAuthSample(tests.AppEngineTestbedCase):
def setUp(self):
super(TestAuthSample, self).setUp()
self.app = webtest.TestApp(main.app)
main.PROJECTID = self.project_id

def test_anonymous_get(self):
response = self.app.get('/')
Expand Down Expand Up @@ -55,12 +56,7 @@ def test_oauthed_get(self, *args):
{'status': '200'})

with mock.patch.object(main.decorator, 'http', return_value=mock_http):
original_projectid = main.PROJECTID
try:
main.PROJECTID = self.constants['projectId']
response = self.app.get('/')
finally:
main.PROJECTID = original_projectid
response = self.app.get('/')

# Should make the api call
self.assertEqual(response.status_int, 200)
Expand Down
13 changes: 8 additions & 5 deletions bigquery/samples/async_query_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,16 @@
class TestAsyncQuery(tests.CloudBaseTest):

def test_async_query(self):
query = \
'SELECT corpus FROM publicdata:samples.shakespeare GROUP BY corpus;'

with tests.capture_stdout() as stdout:
main(
self.constants['projectId'],
self.constants['query'],
False,
5,
5)
project_id=self.project_id,
query_string=query,
batch=False,
num_retries=5,
interval=1)

value = stdout.getvalue().strip().split('\n').pop()

Expand Down
43 changes: 25 additions & 18 deletions bigquery/samples/export_data_to_cloud_storage_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,33 +19,40 @@


class TestExportTableToGCS(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_table'

def test_export_table_csv(self):
cloud_storage_output_uri = 'gs://{}/output.csv'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="CSV")

def test_export_table_json(self):
cloud_storage_output_uri = \
'gs://{}/output.json'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="NEWLINE_DELIMITED_JSON")

def test_export_table_avro(self):
cloud_storage_output_uri = \
'gs://{}/output.avro'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="AVRO")
2 changes: 1 addition & 1 deletion bigquery/samples/getting_started_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
class TestGettingStarted(tests.CloudBaseTest):
def test_main(self):
with tests.capture_stdout() as mock_stdout:
main(self.constants['projectId'])
main(self.project_id)

stdout = mock_stdout.getvalue()
self.assertRegexpMatches(stdout, re.compile(
Expand Down
2 changes: 1 addition & 1 deletion bigquery/samples/list_datasets_projects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class TestListDatasetsProjects(tests.CloudBaseTest):

def test_main(self):
with tests.capture_stdout() as mock_stdout:
main(self.constants['projectId'])
main(self.project_id)

stdout = mock_stdout.getvalue()

Expand Down
20 changes: 13 additions & 7 deletions bigquery/samples/load_data_from_csv_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,18 @@


class TestLoadDataFromCSV(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_import_table'

def test_load_table(self):
cloud_storage_input_uri = 'gs://{}/data.csv'.format(self.bucket_name)
schema_file = os.path.join(self.resource_path, 'schema.json')

main(
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
os.path.join(self.resource_path, 'schema.json'),
self.constants['cloudStorageInputURI'],
1,
5)
self.project_id,
self.dataset_id,
self.table_id,
schema_file=schema_file,
data_path=cloud_storage_input_uri,
poll_interval=1,
num_retries=5)
10 changes: 6 additions & 4 deletions bigquery/samples/streaming_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@


class TestStreaming(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_table'

def test_stream_row_to_bigquery(self):
with open(
Expand All @@ -33,10 +35,10 @@ def test_stream_row_to_bigquery(self):

with capture_stdout() as stdout:
streaming.main(
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5)
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5)

results = stdout.getvalue().split('\n')
self.assertIsNotNone(json.loads(results[0]))
11 changes: 7 additions & 4 deletions bigquery/samples/sync_query_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,15 @@
class TestSyncQuery(CloudBaseTest):

def test_sync_query(self):
query = \
'SELECT corpus FROM publicdata:samples.shakespeare GROUP BY corpus;'

with capture_stdout() as stdout:
main(
self.constants['projectId'],
self.constants['query'],
30,
5)
project_id=self.project_id,
query=query,
timeout=30,
num_retries=5)

result = stdout.getvalue().split('\n')[0]
self.assertIsNotNone(json.loads(result))
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ class BlogTestCase(CloudBaseTest):
"""Simple test case that ensures the blog code doesn't throw any errors."""

def test_main(self):
main(self.constants['projectId'])
main(self.project_id)
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ class WikiTestCase(CloudBaseTest):
"""Simple test case that ensures the wiki code doesn't throw any errors."""

def test_main(self):
main(self.constants['projectId'])
main(self.project_id)
7 changes: 1 addition & 6 deletions monitoring/samples/auth_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re

import tests
Expand All @@ -21,13 +20,9 @@

class TestTimeseriesList(tests.CloudBaseTest):

@classmethod
def setUpClass(cls):
cls.test_project_id = os.environ.get(tests.PROJECT_ID_ENV)

def test_main(self):
with tests.capture_stdout() as stdout:
auth.main(self.test_project_id)
auth.main(self.project_id)
output = stdout.getvalue().strip()
self.assertRegexpMatches(
output, re.compile(r'Timeseries.list raw response:\s*'
Expand Down
2 changes: 1 addition & 1 deletion storage/api/compose_objects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class TestComposeObjects(CloudBaseTest):
def test_main(self):
args = [
'ignored_command_name',
self.constants['bucketName'],
self.bucket_name,
'dest.txt',
os.path.join(self.resource_path, 'file1.txt'),
os.path.join(self.resource_path, 'file2.txt'),
Expand Down
2 changes: 1 addition & 1 deletion storage/api/list_objects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ class TestListObjects(CloudBaseTest):
def test_main(self):
args = [
'ignored_command_name',
self.constants['bucketName']
self.bucket_name
]
main(args)
4 changes: 0 additions & 4 deletions tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,14 @@

from .utils import (
AppEngineTestbedCase,
BUCKET_NAME_ENV,
capture_stdout,
CloudBaseTest,
PROJECT_ID_ENV,
RESOURCE_PATH)


__all__ = [
'AppEngineTestbedCase',
'BUCKET_NAME_ENV',
'capture_stdout',
'CloudBaseTest',
'PROJECT_ID_ENV',
'RESOURCE_PATH'
]
8 changes: 0 additions & 8 deletions tests/resources/constants.json

This file was deleted.

52 changes: 22 additions & 30 deletions tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
"""

import contextlib
import json
import os
import sys
import tempfile
Expand All @@ -32,45 +31,30 @@
except ImportError:
APPENGINE_AVAILABLE = False

BUCKET_NAME_ENV = 'TEST_BUCKET_NAME'
PROJECT_ID_ENV = 'TEST_PROJECT_ID'

RESOURCE_PATH = os.path.join(
os.path.abspath(os.path.dirname(__file__)), 'resources')
PROJECT_ID_ENV_VAR = 'TEST_PROJECT_ID'
BUCKET_NAME_ENV_VAR = 'TEST_BUCKET_NAME'


class CloudBaseTest(unittest.TestCase):

def setUp(self):
self.resource_path = RESOURCE_PATH
self.project_id = os.environ.get(PROJECT_ID_ENV_VAR)

# A hack to prevent get_application_default from going GAE route.
self._server_software_org = os.environ.get('SERVER_SOFTWARE')
os.environ['SERVER_SOFTWARE'] = ''
if not self.project_id:
raise EnvironmentError(
'You must set the {} environment variable to a valid Google '
'Cloud project ID.'.format(PROJECT_ID_ENV_VAR))

# Constants from environment
test_bucket_name = os.environ.get(BUCKET_NAME_ENV, '')
test_project_id = os.environ.get(PROJECT_ID_ENV, '')
if not test_project_id or not test_bucket_name:
raise Exception('You need to define an env var "%s" and "%s" to '
'run the test.'
% (PROJECT_ID_ENV, BUCKET_NAME_ENV))

# Constants from resources/constants.json
with open(
os.path.join(RESOURCE_PATH, 'constants.json'),
'r') as constants_file:

self.constants = json.load(constants_file)
self.constants['projectId'] = test_project_id
self.constants['bucketName'] = test_bucket_name
self.constants['cloudStorageInputURI'] = (
self.constants['cloudStorageInputURI'] % test_bucket_name)
self.constants['cloudStorageOutputURI'] = (
self.constants['cloudStorageOutputURI'] % test_bucket_name)
self.bucket_name = os.environ.get(BUCKET_NAME_ENV_VAR)

def tearDown(self):
if self._server_software_org:
os.environ['SERVER_SOFTWARE'] = self._server_software_org
if not self.bucket_name:
raise EnvironmentError(
'You must set the {} environment variable to a valid Google '
'Cloud Storage bucket.'.format(BUCKET_NAME_ENV_VAR))


class AppEngineTestbedCase(CloudBaseTest):
Expand All @@ -81,6 +65,10 @@ def setUp(self):
if not APPENGINE_AVAILABLE:
raise SkipTest()

# A hack to prevent get_application_default from going GAE route.
self._server_software_org = os.environ.get('SERVER_SOFTWARE')
os.environ['SERVER_SOFTWARE'] = ''

# Setup the datastore and memcache stub.
# First, create an instance of the Testbed class.
self.testbed = testbed.Testbed()
Expand All @@ -103,6 +91,10 @@ def setUp(self):

def tearDown(self):
super(AppEngineTestbedCase, self).tearDown()

if self._server_software_org:
os.environ['SERVER_SOFTWARE'] = self._server_software_org

self.testbed.deactivate()

def loginUser(self, email='user@example.com', id='123', is_admin=False):
Expand All @@ -115,7 +107,7 @@ def loginUser(self, email='user@example.com', id='123', is_admin=False):

@contextlib.contextmanager
def capture_stdout():
"""Capture stdout."""
"""Capture stdout to a StringIO object."""
fake_stdout = cStringIO()
old_stdout = sys.stdout

Expand Down

0 comments on commit 1787ef0

Please sign in to comment.