Commit fc6979e — BigQuery: Fix unit tests for new versions of fastparquet and pytest (#8553)

* Exclude fastparquet.

* Use the exception from the pytest.raises context instead of the context manager itself (see the sketch below).
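
For context, a minimal standalone sketch of the pattern this commit adopts (the test name and error message below are illustrative, not taken from the repository): newer pytest releases changed str() of the ExceptionInfo object returned by pytest.raises so that it no longer contains the exception message, so assertions should inspect the raised exception via .value instead:

import pytest

def test_error_message_is_checked_on_the_exception():
    # pytest.raises yields an ExceptionInfo object, not the exception itself.
    with pytest.raises(ValueError) as exc_context:
        raise ValueError("pyarrow is required for this feature")
    # Check the message on exc_context.value (the raised exception) rather than
    # on str(exc_context), which newer pytest no longer fills with the message.
    assert "pyarrow is required" in str(exc_context.value)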
tswast authored Jul 1, 2019
1 parent f6631cd commit fc6979e
Showing 4 changed files with 15 additions and 10 deletions.
5 changes: 5 additions & 0 deletions setup.py
@@ -51,6 +51,11 @@
 all_extras = []

 for extra in extras:
+    if extra == "fastparquet":
+        # Skip fastparquet from "all" because it is redundant with pyarrow and
+        # creates a dependency on pre-release versions of numpy. See:
+        # https://github.com/googleapis/google-cloud-python/issues/8549
+        continue
     all_extras.extend(extras[extra])

 extras["all"] = all_extras
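A minimal sketch of how the loop above assembles the "all" extra once fastparquet is skipped (the extras dictionary and version pins here are made up for illustration; they are not the package's real requirements):

extras = {
    "pandas": ["pandas>=0.17.1"],      # illustrative pin only
    "fastparquet": ["fastparquet"],
    "pyarrow": ["pyarrow>=0.4.1"],     # illustrative pin only
}

all_extras = []
for extra in extras:
    if extra == "fastparquet":
        # Redundant with pyarrow; also drags in pre-release numpy builds.
        continue
    all_extras.extend(extras[extra])

extras["all"] = all_extras
# The fastparquet requirement is no longer part of the "all" extra.
assert "fastparquet" not in extras["all"]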
8 changes: 4 additions & 4 deletions tests/unit/test__pandas_helpers.py
@@ -533,16 +533,16 @@ def test_dataframe_to_arrow_w_unknown_type(module_under_test):
 @pytest.mark.skipIf(pandas is None, "Requires `pandas`")
 def test_dataframe_to_parquet_without_pyarrow(module_under_test, monkeypatch):
     monkeypatch.setattr(module_under_test, "pyarrow", None)
-    with pytest.raises(ValueError) as exc:
+    with pytest.raises(ValueError) as exc_context:
         module_under_test.dataframe_to_parquet(pandas.DataFrame(), (), None)
-    assert "pyarrow is required" in str(exc)
+    assert "pyarrow is required" in str(exc_context.value)


 @pytest.mark.skipIf(pandas is None, "Requires `pandas`")
 @pytest.mark.skipIf(pyarrow is None, "Requires `pyarrow`")
 def test_dataframe_to_parquet_w_missing_columns(module_under_test, monkeypatch):
-    with pytest.raises(ValueError) as exc:
+    with pytest.raises(ValueError) as exc_context:
         module_under_test.dataframe_to_parquet(
             pandas.DataFrame(), (schema.SchemaField("not_found", "STRING"),), None
         )
-    assert "columns in schema must match" in str(exc)
+    assert "columns in schema must match" in str(exc_context.value)
4 changes: 2 additions & 2 deletions tests/unit/test_magics.py
@@ -273,10 +273,10 @@ def test__make_bqstorage_client_true_raises_import_error(monkeypatch):
         google.auth.credentials.Credentials, instance=True
     )

-    with pytest.raises(ImportError) as exc:
+    with pytest.raises(ImportError) as exc_context:
         magics._make_bqstorage_client(True, credentials_mock)

-    assert "google-cloud-bigquery-storage" in str(exc)
+    assert "google-cloud-bigquery-storage" in str(exc_context.value)


 @pytest.mark.usefixtures("ipython_interactive")
8 changes: 4 additions & 4 deletions tests/unit/test_table.py
@@ -2226,9 +2226,9 @@ def test_to_dataframe_w_bqstorage_raises_import_error(self):

         with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises(
             ValueError
-        ) as exc:
+        ) as exc_context:
             row_iterator.to_dataframe(bqstorage_client=bqstorage_client)
-        assert mut._NO_BQSTORAGE_ERROR in str(exc)
+        assert mut._NO_BQSTORAGE_ERROR in str(exc_context.value)

     @unittest.skipIf(
         bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
@@ -2514,6 +2514,6 @@ def test_table_reference_to_bqstorage_raises_import_error():
     for cls in classes:
         with mock.patch.object(mut, "bigquery_storage_v1beta1", None), pytest.raises(
             ValueError
-        ) as exc:
+        ) as exc_context:
             cls.from_string("my-project.my_dataset.my_table").to_bqstorage()
-        assert mut._NO_BQSTORAGE_ERROR in str(exc)
+        assert mut._NO_BQSTORAGE_ERROR in str(exc_context.value)
