Commit ef5ca30

chore: migrate to owl bot (#194)
* chore: migrate to owl bot
* chore: copy files from googleapis-gen 79c15da3a71c276e23aa2746f9fa243741763179
* chore: run the post processor
* 🦉 Updates from OwlBot
* Update .OwlBot.lock.yaml
* 🦉 Updates from OwlBot

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent de609b0 commit ef5ca30

5 files changed: +180 -356 lines changed
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
docker:
  digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa
  image: gcr.io/repo-automation-bots/owlbot-python:latest
Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

docker:
  image: gcr.io/repo-automation-bots/owlbot-python:latest

deep-remove-regex:
  - /owl-bot-staging

deep-preserve-regex:
  - /owl-bot-staging/v1alpha2
  - /owl-bot-staging/v1beta1

deep-copy-regex:
  - source: /google/cloud/bigquery/storage/(v.*)/.*-py/(.*)
    dest: /owl-bot-staging/$1/$2

begin-after-commit-hash: 79c15da3a71c276e23aa2746f9fa243741763179
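A note on the deep-copy-regex entry above: Owl Bot uses it to copy generated sources from googleapis-gen into the owl-bot-staging tree, where $1 and $2 refer to the two captured groups. The sketch below is an illustration only (not part of this commit); it reproduces the mapping in plain Python for a hypothetical generated path, using \1/\2 backreferences in place of Owl Bot's $1/$2 syntax.

import re

# Hypothetical path as it might appear in googleapis-gen output for the v1 client.
source_path = (
    "/google/cloud/bigquery/storage/v1/bigquery-storage-v1-py/"
    "google/cloud/bigquery_storage_v1/types/arrow.py"
)

# Same pattern as the deep-copy-regex source above; \1 and \2 stand in for $1 and $2.
dest_path = re.sub(
    r"/google/cloud/bigquery/storage/(v.*)/.*-py/(.*)",
    r"/owl-bot-staging/\1/\2",
    source_path,
)

print(dest_path)
# /owl-bot-staging/v1/google/cloud/bigquery_storage_v1/types/arrow.py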
Lines changed: 146 additions & 0 deletions
@@ -0,0 +1,146 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This script is used to synthesize generated parts of this library."""

import re

import synthtool as s
from synthtool import gcp
from synthtool.languages import python

common = gcp.CommonTemplates()

default_version = "v1"

for library in s.get_staging_dirs(default_version):
    # We don't want the generated client to be accessible through
    # "google.cloud.bigquery_storage", replace it with the hand written client that
    # wraps it.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        f"from google\.cloud\.bigquery_storage_{library.name}\.services.big_query_read.client import",
        f"from google.cloud.bigquery_storage_{library.name} import",
    )

    # We also don't want to expose the async client just yet, at least not until
    # it is wrapped in its own manual client class.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        (
            f"from google\.cloud\.bigquery_storage_{library.name}\.services.big_query_read.async_client "
            r"import BigQueryReadAsyncClient\n"
        ),
        "",
    )
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        r"""["']BigQueryReadAsyncClient["'],\n""",
        "",
    )

    # We want types and __version__ to be accessible through the "main" library
    # entry point.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        f"from google\.cloud\.bigquery_storage_{library.name}\.types\.arrow import ArrowRecordBatch",
        (
            f"from google.cloud.bigquery_storage_{library.name} import types\n"
            f"from google.cloud.bigquery_storage_{library.name} import __version__\n"
            "\g<0>"
        ),
    )
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        r"""["']ArrowRecordBatch["']""",
        ('"__version__",\n' '    "types",\n' "    \g<0>"),
    )

    # We want to expose all types through "google.cloud.bigquery_storage.types",
    # not just the types generated for the BQ Storage library. For example, we also
    # want to include common proto types such as Timestamp.
    s.replace(
        library / "google/cloud/bigquery_storage/__init__.py",
        r"import types",
        "import gapic_types as types",
    )

    # The DataFormat enum is not exposed in bigquery_storage_v1/types, add it there.
    s.replace(
        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
        r"from \.stream import \(",
        "\g<0>\n    DataFormat,",
    )
    s.replace(
        library / f"google/cloud/bigquery_storage_{library.name}*/types/__init__.py",
        r"""["']ReadSession["']""",
        '"DataFormat",\n    \g<0>',
    )

    # The append_rows method doesn't contain keyword arguments that build request
    # objects, so flattened tests are not needed and break with TypeError.
    s.replace(
        library / f'tests/unit/gapic/bigquery_storage_{library.name}*/test_big_query_write.py',
        r"(@[a-z.()\n]*\n)?(async )?"
        r"def test_append_rows_flattened[_a-z]*\(\):\n"
        r"( {4}.*|\n)+",
        '\n',
    )

    s.move(
        library,
        excludes=[
            "bigquery-storage-*-py.tar.gz",
            "docs/conf.py",
            "docs/index.rst",
            f"google/cloud/bigquery_storage_{library.name}/__init__.py",
            # v1beta2 was first generated after the microgenerator migration.
            "scripts/fixup_bigquery_storage_v1beta2_keywords.py",
            "README.rst",
            "nox*.py",
            "setup.py",
            "setup.cfg",
        ],
    )

s.remove_staging_dirs()

# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
extras = ["fastavro", "pandas", "pyarrow"]

templated_files = common.py_library(
    microgenerator=True,
    samples=True,
    unit_test_extras=extras,
    system_test_extras=extras,
    system_test_external_dependencies=["google-cloud-bigquery"],
    cov_level=95,
)
s.move(
    templated_files, excludes=[".coveragerc"]
)  # microgenerator has a good .coveragerc file


# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------

python.py_samples(skip_readmes=True)

# TODO(busunkim): Use latest sphinx after microgenerator transition
s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"')

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
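To make the synth script's first substitution concrete: synthtool's s.replace applies a regular-expression substitution to the listed files in each staging directory. The snippet below is an illustration only (not part of this commit); it shows, for a hypothetical v1 staging copy, how the generated import in google/cloud/bigquery_storage/__init__.py gets redirected at the hand-written wrapper package.

import re

name = "v1"  # stands in for library.name in the loop above

# Hypothetical import line as the code generator would emit it.
generated_line = (
    f"from google.cloud.bigquery_storage_{name}.services.big_query_read.client "
    "import BigQueryReadClient"
)

rewritten_line = re.sub(
    rf"from google\.cloud\.bigquery_storage_{name}\.services\.big_query_read\.client import",
    f"from google.cloud.bigquery_storage_{name} import",
    generated_line,
)

print(rewritten_line)
# from google.cloud.bigquery_storage_v1 import BigQueryReadClient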

packages/google-cloud-bigquery-storage/synth.metadata

Lines changed: 0 additions & 184 deletions
This file was deleted.

0 commit comments