Skip to content

Commit 03f9fbe

Browse files
tswast and gcf-owl-bot[bot]
authored and committed
chore: sort imports (#761)
* chore: sort imports * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent db2ef9d commit 03f9fbe

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

76 files changed

+291
-434
lines changed

packages/google-cloud-bigquery-storage/docs/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,9 @@
2424
# All configuration values have a default; values that are commented out
2525
# serve to show the default.
2626

27-
import sys
2827
import os
2928
import shlex
29+
import sys
3030

3131
# If extensions (or modules to document with autodoc) are in another directory,
3232
# add these directories to sys.path here. If the directory is relative to the

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage/__init__.py

Lines changed: 39 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -19,52 +19,53 @@
1919

2020

2121
from google.cloud.bigquery_storage_v1 import BigQueryReadClient
22+
from google.cloud.bigquery_storage_v1 import gapic_types as types
23+
from google.cloud.bigquery_storage_v1.services.big_query_write.async_client import (
24+
BigQueryWriteAsyncClient,
25+
)
2226
from google.cloud.bigquery_storage_v1.services.big_query_write.client import (
2327
BigQueryWriteClient,
2428
)
25-
from google.cloud.bigquery_storage_v1.services.big_query_write.async_client import (
26-
BigQueryWriteAsyncClient,
29+
from google.cloud.bigquery_storage_v1.types.arrow import (
30+
ArrowRecordBatch,
31+
ArrowSchema,
32+
ArrowSerializationOptions,
2733
)
28-
29-
from google.cloud.bigquery_storage_v1 import gapic_types as types
30-
from google.cloud.bigquery_storage_v1.types.arrow import ArrowRecordBatch
31-
from google.cloud.bigquery_storage_v1.types.arrow import ArrowSchema
32-
from google.cloud.bigquery_storage_v1.types.arrow import ArrowSerializationOptions
33-
from google.cloud.bigquery_storage_v1.types.avro import AvroRows
34-
from google.cloud.bigquery_storage_v1.types.avro import AvroSchema
35-
from google.cloud.bigquery_storage_v1.types.avro import AvroSerializationOptions
36-
from google.cloud.bigquery_storage_v1.types.protobuf import ProtoRows
37-
from google.cloud.bigquery_storage_v1.types.protobuf import ProtoSchema
38-
from google.cloud.bigquery_storage_v1.types.storage import AppendRowsRequest
39-
from google.cloud.bigquery_storage_v1.types.storage import AppendRowsResponse
40-
from google.cloud.bigquery_storage_v1.types.storage import (
41-
BatchCommitWriteStreamsRequest,
34+
from google.cloud.bigquery_storage_v1.types.avro import (
35+
AvroRows,
36+
AvroSchema,
37+
AvroSerializationOptions,
4238
)
39+
from google.cloud.bigquery_storage_v1.types.protobuf import ProtoRows, ProtoSchema
4340
from google.cloud.bigquery_storage_v1.types.storage import (
41+
AppendRowsRequest,
42+
AppendRowsResponse,
43+
BatchCommitWriteStreamsRequest,
4444
BatchCommitWriteStreamsResponse,
45+
CreateReadSessionRequest,
46+
CreateWriteStreamRequest,
47+
FinalizeWriteStreamRequest,
48+
FinalizeWriteStreamResponse,
49+
FlushRowsRequest,
50+
FlushRowsResponse,
51+
GetWriteStreamRequest,
52+
ReadRowsRequest,
53+
ReadRowsResponse,
54+
RowError,
55+
SplitReadStreamRequest,
56+
SplitReadStreamResponse,
57+
StorageError,
58+
StreamStats,
59+
ThrottleState,
60+
)
61+
from google.cloud.bigquery_storage_v1.types.stream import (
62+
DataFormat,
63+
ReadSession,
64+
ReadStream,
65+
WriteStream,
66+
WriteStreamView,
4567
)
46-
from google.cloud.bigquery_storage_v1.types.storage import CreateReadSessionRequest
47-
from google.cloud.bigquery_storage_v1.types.storage import CreateWriteStreamRequest
48-
from google.cloud.bigquery_storage_v1.types.storage import FinalizeWriteStreamRequest
49-
from google.cloud.bigquery_storage_v1.types.storage import FinalizeWriteStreamResponse
50-
from google.cloud.bigquery_storage_v1.types.storage import FlushRowsRequest
51-
from google.cloud.bigquery_storage_v1.types.storage import FlushRowsResponse
52-
from google.cloud.bigquery_storage_v1.types.storage import GetWriteStreamRequest
53-
from google.cloud.bigquery_storage_v1.types.storage import ReadRowsRequest
54-
from google.cloud.bigquery_storage_v1.types.storage import ReadRowsResponse
55-
from google.cloud.bigquery_storage_v1.types.storage import RowError
56-
from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamRequest
57-
from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamResponse
58-
from google.cloud.bigquery_storage_v1.types.storage import StorageError
59-
from google.cloud.bigquery_storage_v1.types.storage import StreamStats
60-
from google.cloud.bigquery_storage_v1.types.storage import ThrottleState
61-
from google.cloud.bigquery_storage_v1.types.stream import ReadSession
62-
from google.cloud.bigquery_storage_v1.types.stream import ReadStream
63-
from google.cloud.bigquery_storage_v1.types.stream import WriteStream
64-
from google.cloud.bigquery_storage_v1.types.stream import DataFormat
65-
from google.cloud.bigquery_storage_v1.types.stream import WriteStreamView
66-
from google.cloud.bigquery_storage_v1.types.table import TableFieldSchema
67-
from google.cloud.bigquery_storage_v1.types.table import TableSchema
68+
from google.cloud.bigquery_storage_v1.types.table import TableFieldSchema, TableSchema
6869

6970
__all__ = (
7071
"BigQueryReadClient",

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,7 @@
2020

2121
__version__ = package_version.__version__
2222

23-
from google.cloud.bigquery_storage_v1 import client
24-
from google.cloud.bigquery_storage_v1 import types
23+
from google.cloud.bigquery_storage_v1 import client, types
2524

2625

2726
class BigQueryReadClient(client.BigQueryReadClient):

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/client.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,7 @@
2424
import google.api_core.gapic_v1.method
2525

2626
from google.cloud.bigquery_storage_v1 import reader
27-
from google.cloud.bigquery_storage_v1.services import big_query_read
28-
from google.cloud.bigquery_storage_v1.services import big_query_write
29-
27+
from google.cloud.bigquery_storage_v1.services import big_query_read, big_query_write
3028

3129
_SCOPES = (
3230
"https://www.googleapis.com/auth/bigquery",

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/gapic_types.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,11 @@
2020
import inspect
2121
import sys
2222

23-
import proto
24-
25-
from google.cloud.bigquery_storage_v1.types import arrow
26-
from google.cloud.bigquery_storage_v1.types import avro
27-
from google.cloud.bigquery_storage_v1.types import storage
28-
from google.cloud.bigquery_storage_v1.types import stream
29-
3023
from google.protobuf import message as protobuf_message
3124
from google.protobuf import timestamp_pb2
25+
import proto
26+
27+
from google.cloud.bigquery_storage_v1.types import arrow, avro, storage, stream
3228

3329

3430
# The current api core helper does not find new proto messages of type proto.Message,

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,8 @@
1313
# See the License for the specific language governing permissions and
1414
# limitations under the License.
1515
#
16-
from .client import BigQueryReadClient
1716
from .async_client import BigQueryReadAsyncClient
17+
from .client import BigQueryReadClient
1818

1919
__all__ = (
2020
"BigQueryReadClient",

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -17,41 +17,40 @@
1717
import functools
1818
import re
1919
from typing import (
20+
AsyncIterable,
21+
Awaitable,
2022
Dict,
2123
Mapping,
2224
MutableMapping,
2325
MutableSequence,
2426
Optional,
25-
AsyncIterable,
26-
Awaitable,
2727
Sequence,
2828
Tuple,
2929
Type,
3030
Union,
3131
)
3232

33-
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
34-
35-
from google.api_core.client_options import ClientOptions
3633
from google.api_core import exceptions as core_exceptions
3734
from google.api_core import gapic_v1
3835
from google.api_core import retry_async as retries
36+
from google.api_core.client_options import ClientOptions
3937
from google.auth import credentials as ga_credentials # type: ignore
4038
from google.oauth2 import service_account # type: ignore
4139

40+
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
41+
4242
try:
4343
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
4444
except AttributeError: # pragma: NO COVER
4545
OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
4646

47-
from google.cloud.bigquery_storage_v1.types import arrow
48-
from google.cloud.bigquery_storage_v1.types import avro
49-
from google.cloud.bigquery_storage_v1.types import storage
50-
from google.cloud.bigquery_storage_v1.types import stream
5147
from google.protobuf import timestamp_pb2 # type: ignore
52-
from .transports.base import BigQueryReadTransport, DEFAULT_CLIENT_INFO
53-
from .transports.grpc_asyncio import BigQueryReadGrpcAsyncIOTransport
48+
49+
from google.cloud.bigquery_storage_v1.types import arrow, avro, storage, stream
50+
5451
from .client import BigQueryReadClient
52+
from .transports.base import DEFAULT_CLIENT_INFO, BigQueryReadTransport
53+
from .transports.grpc_asyncio import BigQueryReadGrpcAsyncIOTransport
5554

5655

5756
class BigQueryReadAsyncClient:

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/client.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -18,11 +18,11 @@
1818
import re
1919
from typing import (
2020
Dict,
21+
Iterable,
2122
Mapping,
2223
MutableMapping,
2324
MutableSequence,
2425
Optional,
25-
Iterable,
2626
Sequence,
2727
Tuple,
2828
Type,
@@ -31,29 +31,28 @@
3131
)
3232
import warnings
3333

34-
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
35-
3634
from google.api_core import client_options as client_options_lib
3735
from google.api_core import exceptions as core_exceptions
3836
from google.api_core import gapic_v1
3937
from google.api_core import retry as retries
4038
from google.auth import credentials as ga_credentials # type: ignore
39+
from google.auth.exceptions import MutualTLSChannelError # type: ignore
4140
from google.auth.transport import mtls # type: ignore
4241
from google.auth.transport.grpc import SslCredentials # type: ignore
43-
from google.auth.exceptions import MutualTLSChannelError # type: ignore
4442
from google.oauth2 import service_account # type: ignore
4543

44+
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
45+
4646
try:
4747
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
4848
except AttributeError: # pragma: NO COVER
4949
OptionalRetry = Union[retries.Retry, object, None] # type: ignore
5050

51-
from google.cloud.bigquery_storage_v1.types import arrow
52-
from google.cloud.bigquery_storage_v1.types import avro
53-
from google.cloud.bigquery_storage_v1.types import storage
54-
from google.cloud.bigquery_storage_v1.types import stream
5551
from google.protobuf import timestamp_pb2 # type: ignore
56-
from .transports.base import BigQueryReadTransport, DEFAULT_CLIENT_INFO
52+
53+
from google.cloud.bigquery_storage_v1.types import arrow, avro, storage, stream
54+
55+
from .transports.base import DEFAULT_CLIENT_INFO, BigQueryReadTransport
5756
from .transports.grpc import BigQueryReadGrpcTransport
5857
from .transports.grpc_asyncio import BigQueryReadGrpcAsyncIOTransport
5958

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
from .grpc import BigQueryReadGrpcTransport
2121
from .grpc_asyncio import BigQueryReadGrpcAsyncIOTransport
2222

23-
2423
# Compile a registry of transports.
2524
_transport_registry = OrderedDict() # type: Dict[str, Type[BigQueryReadTransport]]
2625
_transport_registry["grpc"] = BigQueryReadGrpcTransport

packages/google-cloud-bigquery-storage/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,18 +16,16 @@
1616
import abc
1717
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
1818

19-
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
20-
21-
import google.auth # type: ignore
2219
import google.api_core
2320
from google.api_core import exceptions as core_exceptions
2421
from google.api_core import gapic_v1
2522
from google.api_core import retry as retries
23+
import google.auth # type: ignore
2624
from google.auth import credentials as ga_credentials # type: ignore
2725
from google.oauth2 import service_account # type: ignore
2826

29-
from google.cloud.bigquery_storage_v1.types import storage
30-
from google.cloud.bigquery_storage_v1.types import stream
27+
from google.cloud.bigquery_storage_v1 import gapic_version as package_version
28+
from google.cloud.bigquery_storage_v1.types import storage, stream
3129

3230
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
3331
gapic_version=package_version.__version__

0 commit comments

Comments (0)