diff --git a/docs/opentelemetry-tracing.rst b/docs/opentelemetry-tracing.rst
index 9b3dea276f..f7a6e473d6 100644
--- a/docs/opentelemetry-tracing.rst
+++ b/docs/opentelemetry-tracing.rst
@@ -9,6 +9,7 @@ To take advantage of these traces, we first need to install OpenTelemetry:
 
 .. code-block:: sh
 
     pip install opentelemetry-api opentelemetry-sdk opentelemetry-instrumentation
-    pip install opentelemetry-exporter-google-cloud
+    # [Optional] Installs the Cloud Trace exporter; any exporter of your choice can be used instead.
+    pip install opentelemetry-exporter-google-cloud
 
@@ -19,14 +20,14 @@ We also need to tell OpenTelemetry which exporter to use. To export Spanner trac
 
     from opentelemetry import trace
     from opentelemetry.sdk.trace import TracerProvider
-    from opentelemetry.trace.sampling import ProbabilitySampler
+    from opentelemetry.sdk.trace.sampling import TraceIdRatioBased
     from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
     # BatchExportSpanProcessor exports spans to Cloud Trace
     # in a seperate thread to not block on the main thread
     from opentelemetry.sdk.trace.export import BatchExportSpanProcessor
 
     # Create and export one trace every 1000 requests
-    sampler = ProbabilitySampler(1/1000)
+    sampler = TraceIdRatioBased(1/1000)
     # Use the default tracer provider
     trace.set_tracer_provider(TracerProvider(sampler=sampler))
     trace.get_tracer_provider().add_span_processor(
@@ -38,3 +39,6 @@
 
 Generated spanner traces should now be available on `Cloud Trace `_
 
+To have SQL statements annotated in your spans (the ``db.statement`` attribute),
+set the environment variable ``SPANNER_ENABLE_EXTENDED_TRACING=true`` before
+``google.cloud.spanner`` is imported, because the setting is read at import time.
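
The new flag can also be exercised locally without Cloud Trace. The following is a minimal sketch and is not part of this change set: it assumes OpenTelemetry is installed, Application Default Credentials are configured, and ``my-instance``/``my-database`` are placeholders for resources you own. It uses the SDK's in-memory exporter to show that ``db.statement`` only appears on spans when the flag is set before the library is imported.

.. code-block:: python

    # Minimal sketch (not part of this change): verify the db.statement
    # annotation locally with the SDK's in-memory exporter instead of
    # Cloud Trace. "my-instance" and "my-database" are placeholders.
    import os

    # The flag is read when google.cloud.spanner is imported, so set it first.
    os.environ["SPANNER_ENABLE_EXTENDED_TRACING"] = "true"

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import SimpleSpanProcessor
    from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
        InMemorySpanExporter,
    )
    from google.cloud import spanner

    exporter = InMemorySpanExporter()
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(exporter))
    trace.set_tracer_provider(provider)

    database = spanner.Client().instance("my-instance").database("my-database")
    with database.snapshot() as snapshot:
        list(snapshot.execute_sql("SELECT 1"))

    for span in exporter.get_finished_spans():
        print(span.name, span.attributes.get("db.statement"))
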
diff --git a/examples/trace.py b/examples/trace.py
new file mode 100644
index 0000000000..321156849d
--- /dev/null
+++ b/examples/trace.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import google.cloud.spanner as spanner
+from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+from opentelemetry.sdk.trace.sampling import ALWAYS_ON
+from opentelemetry import trace
+
+
+def main():
+    # Setup OpenTelemetry, trace and Cloud Trace exporter.
+    sampler = ALWAYS_ON
+    tracerProvider = TracerProvider(sampler=sampler)
+    tracerProvider.add_span_processor(
+        BatchSpanProcessor(CloudTraceSpanExporter()))
+    trace.set_tracer_provider(tracerProvider)
+    tracer = trace.get_tracer(__name__)
+
+    # Setup the Cloud Spanner Client.
+    project_id = os.environ.get('SPANNER_PROJECT_ID')
+    spanner_client = spanner.Client(project_id)
+    instance = spanner_client.instance('test-instance')
+    database = instance.database('test-db')
+
+    # Now run our queries
+    with tracer.start_as_current_span('QueryDatabase'):
+        with database.snapshot() as snapshot:
+            with tracer.start_as_current_span('InformationSchema'):
+                info_schema = snapshot.execute_sql(
+                    'SELECT * FROM INFORMATION_SCHEMA.TABLES')
+                for row in info_schema:
+                    print(row)
+
+        with tracer.start_as_current_span('ServerTimeQuery'):
+            with database.snapshot() as snapshot:
+                # Purposefully issue a bad SQL statement to examine exceptions
+                # that get recorded and an ERROR span status.
+                data = snapshot.execute_sql('SELECT CURRENT_TIMESTAMPx()')
+                for row in data:
+                    print(row)
+
+
+if __name__ == '__main__':
+    main()
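
If you want to try ``examples/trace.py`` without a Cloud Trace setup, the exporter wiring can be swapped for a console exporter. This is an illustrative variant only, not part of the example itself:

.. code-block:: python

    # Illustrative variant (not part of examples/trace.py): print spans to
    # stdout instead of sending them to Cloud Trace.
    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    trace.set_tracer_provider(provider)
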
diff --git a/google/cloud/spanner_v1/_opentelemetry_tracing.py b/google/cloud/spanner_v1/_opentelemetry_tracing.py
index 8f9f8559ef..346ed400c0 100644
--- a/google/cloud/spanner_v1/_opentelemetry_tracing.py
+++ b/google/cloud/spanner_v1/_opentelemetry_tracing.py
@@ -15,6 +15,7 @@
 """Manages OpenTelemetry trace creation and handling"""
 
 from contextlib import contextmanager
+import os
 
 from google.api_core.exceptions import GoogleAPICallError
 from google.cloud.spanner_v1 import SpannerClient
@@ -22,12 +23,41 @@
 try:
     from opentelemetry import trace
     from opentelemetry.trace.status import Status, StatusCode
+    from opentelemetry.semconv.trace import SpanAttributes
 
     HAS_OPENTELEMETRY_INSTALLED = True
+
+    DB_SYSTEM = SpanAttributes.DB_SYSTEM
+    DB_NAME = SpanAttributes.DB_NAME
+    DB_CONNECTION_STRING = SpanAttributes.DB_CONNECTION_STRING
+    NET_HOST_NAME = SpanAttributes.NET_HOST_NAME
+    DB_STATEMENT = SpanAttributes.DB_STATEMENT
 except ImportError:
     HAS_OPENTELEMETRY_INSTALLED = False
+    # Fallback strings so these names can always be imported by
+    # snapshot.py and transaction.py, even without OpenTelemetry installed.
+    DB_SYSTEM = "db.system"
+    DB_NAME = "db.name"
+    DB_CONNECTION_STRING = "db.connection_string"
+    NET_HOST_NAME = "net.host.name"
+    DB_STATEMENT = "db.statement"
 
 
+EXTENDED_TRACING_ENABLED = (
+    os.environ.get("SPANNER_ENABLE_EXTENDED_TRACING", "") == "true"
+)
+
+
+def annotate_with_sql_statement(span, sql):
+    """
+    annotate_with_sql_statement sets the DB_STATEMENT attribute to the
+    SQL statement, but only if SPANNER_ENABLE_EXTENDED_TRACING=true
+    is set in the environment.
+    """
+    if EXTENDED_TRACING_ENABLED:
+        span.set_attribute(DB_STATEMENT, sql)
+
+
 @contextmanager
 def trace_call(name, session, extra_attributes=None):
     if not HAS_OPENTELEMETRY_INSTALLED or not session:
@@ -39,15 +69,18 @@ def trace_call(name, session, extra_attributes=None):
 
     # Set base attributes that we know for every trace created
     attributes = {
-        "db.type": "spanner",
-        "db.url": SpannerClient.DEFAULT_ENDPOINT,
-        "db.instance": session._database.name,
-        "net.host.name": SpannerClient.DEFAULT_ENDPOINT,
+        DB_SYSTEM: "google.cloud.spanner",
+        DB_CONNECTION_STRING: SpannerClient.DEFAULT_ENDPOINT,
+        DB_NAME: session._database.name,
+        NET_HOST_NAME: SpannerClient.DEFAULT_ENDPOINT,
     }
 
     if extra_attributes:
         attributes.update(extra_attributes)
 
+    if not EXTENDED_TRACING_ENABLED:
+        attributes.pop(DB_STATEMENT, None)
+
     with tracer.start_as_current_span(
         name, kind=trace.SpanKind.CLIENT, attributes=attributes
     ) as span:
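
Because ``EXTENDED_TRACING_ENABLED`` is read from the environment once at import time, code that wants to exercise ``annotate_with_sql_statement`` can patch the module-level flag rather than the environment. The sketch below is illustrative only; the test name is hypothetical and it is not part of this change's test suite.

.. code-block:: python

    # Illustrative sketch: the flag is patched on the module because the
    # environment variable is only consulted at import time.
    from unittest import mock

    from google.cloud.spanner_v1 import _opentelemetry_tracing as ot_tracing


    def test_db_statement_gated_by_extended_tracing_flag():
        span = mock.Mock()

        # Flag off: the attribute must not be set.
        with mock.patch.object(ot_tracing, "EXTENDED_TRACING_ENABLED", False):
            ot_tracing.annotate_with_sql_statement(span, "SELECT 1")
        span.set_attribute.assert_not_called()

        # Flag on: the attribute is set to the SQL text.
        with mock.patch.object(ot_tracing, "EXTENDED_TRACING_ENABLED", True):
            ot_tracing.annotate_with_sql_statement(span, "SELECT 1")
        span.set_attribute.assert_called_once_with(
            ot_tracing.DB_STATEMENT, "SELECT 1"
        )
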
diff --git a/google/cloud/spanner_v1/snapshot.py b/google/cloud/spanner_v1/snapshot.py
index 3bc1a746bd..b5ed26825e 100644
--- a/google/cloud/spanner_v1/snapshot.py
+++ b/google/cloud/spanner_v1/snapshot.py
@@ -38,7 +38,10 @@
     _check_rst_stream_error,
     _SessionWrapper,
 )
-from google.cloud.spanner_v1._opentelemetry_tracing import trace_call
+from google.cloud.spanner_v1._opentelemetry_tracing import (
+    trace_call,
+    DB_STATEMENT,
+)
 from google.cloud.spanner_v1.streamed import StreamedResultSet
 from google.cloud.spanner_v1 import RequestOptions
 
@@ -488,7 +491,7 @@ def execute_sql(
             timeout=timeout,
         )
 
-        trace_attributes = {"db.statement": sql}
+        trace_attributes = {DB_STATEMENT: sql}
 
         if self._transaction_id is None:
             # lock is added to handle the inline begin for first rpc
@@ -696,7 +699,7 @@ def partition_query(
             partition_options=partition_options,
         )
 
-        trace_attributes = {"db.statement": sql}
+        trace_attributes = {DB_STATEMENT: sql}
         with trace_call(
             "CloudSpanner.PartitionReadWriteTransaction",
             self._session,
diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py
index c872cc380d..78c8709ef1 100644
--- a/google/cloud/spanner_v1/transaction.py
+++ b/google/cloud/spanner_v1/transaction.py
@@ -32,7 +32,10 @@
 from google.cloud.spanner_v1 import TransactionOptions
 from google.cloud.spanner_v1.snapshot import _SnapshotBase
 from google.cloud.spanner_v1.batch import _BatchBase
-from google.cloud.spanner_v1._opentelemetry_tracing import trace_call
+from google.cloud.spanner_v1._opentelemetry_tracing import (
+    DB_STATEMENT,
+    trace_call,
+)
 from google.cloud.spanner_v1 import RequestOptions
 from google.api_core import gapic_v1
 from google.api_core.exceptions import InternalServerError
@@ -369,7 +372,7 @@ def execute_update(
             request_options = RequestOptions(request_options)
         request_options.transaction_tag = self.transaction_tag
 
-        trace_attributes = {"db.statement": dml}
+        trace_attributes = {DB_STATEMENT: dml}
 
         request = ExecuteSqlRequest(
             session=self._session.name,
@@ -495,7 +498,7 @@ def batch_update(
 
         trace_attributes = {
             # Get just the queries from the DML statement batch
-            "db.statement": ";".join([statement.sql for statement in parsed])
+            DB_STATEMENT: ";".join([statement.sql for statement in parsed])
         }
         request = ExecuteBatchDmlRequest(
             session=self._session.name,
""" if params is not None: - return Struct( - fields={key: _make_value_pb(value) for key, value in params.items()} - ) + return Struct(fields={ + key: _make_value_pb(value) + for key, value in params.items() + }) return {} @@ -349,8 +363,8 @@ def execute_update( metadata = _metadata_with_prefix(database.name) if database._route_to_leader_enabled: metadata.append( - _metadata_with_leader_aware_routing(database._route_to_leader_enabled) - ) + _metadata_with_leader_aware_routing( + database._route_to_leader_enabled)) api = database.spanner_api seqno, self._execute_sql_count = ( @@ -361,7 +375,8 @@ def execute_update( # Query-level options have higher precedence than client-level and # environment-level options default_query_options = database._instance._client._query_options - query_options = _merge_query_options(default_query_options, query_options) + query_options = _merge_query_options(default_query_options, + query_options) if request_options is None: request_options = RequestOptions() @@ -369,7 +384,7 @@ def execute_update( request_options = RequestOptions(request_options) request_options.transaction_tag = self.transaction_tag - trace_attributes = {"db.statement": dml} + trace_attributes = {DB_STATEMENT: dml} request = ExecuteSqlRequest( session=self._session.name, @@ -401,12 +416,9 @@ def execute_update( trace_attributes, ) # Setting the transaction id because the transaction begin was inlined for first rpc. - if ( - self._transaction_id is None - and response is not None - and response.metadata is not None - and response.metadata.transaction is not None - ): + if (self._transaction_id is None and response is not None + and response.metadata is not None + and response.metadata.transaction is not None): self._transaction_id = response.metadata.transaction.id else: response = self._execute_request( @@ -469,17 +481,16 @@ def batch_update( dml, params, param_types = statement params_pb = self._make_params_pb(params, param_types) parsed.append( - ExecuteBatchDmlRequest.Statement( - sql=dml, params=params_pb, param_types=param_types - ) - ) + ExecuteBatchDmlRequest.Statement(sql=dml, + params=params_pb, + param_types=param_types)) database = self._session._database metadata = _metadata_with_prefix(database.name) if database._route_to_leader_enabled: metadata.append( - _metadata_with_leader_aware_routing(database._route_to_leader_enabled) - ) + _metadata_with_leader_aware_routing( + database._route_to_leader_enabled)) api = database.spanner_api seqno, self._execute_sql_count = ( @@ -495,7 +506,7 @@ def batch_update( trace_attributes = { # Get just the queries from the DML statement batch - "db.statement": ";".join([statement.sql for statement in parsed]) + DB_STATEMENT: ";".join([statement.sql for statement in parsed]) } request = ExecuteBatchDmlRequest( session=self._session.name, @@ -524,11 +535,9 @@ def batch_update( ) # Setting the transaction id because the transaction begin was inlined for first rpc. 
diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py
index 00fdf828da..8e7760ebc6 100644
--- a/tests/system/test_session_api.py
+++ b/tests/system/test_session_api.py
@@ -341,10 +341,10 @@ def assert_span_attributes(
 
 def _make_attributes(db_instance, **kwargs):
     attributes = {
-        "db.type": "spanner",
-        "db.url": "spanner.googleapis.com",
-        "net.host.name": "spanner.googleapis.com",
-        "db.instance": db_instance,
+        ot_helpers.DB_SYSTEM: "google.cloud.spanner",
+        ot_helpers.DB_CONNECTION_STRING: "spanner.googleapis.com",
+        ot_helpers.DB_NAME: db_instance,
+        ot_helpers.NET_HOST_NAME: "spanner.googleapis.com",
     }
     attributes.update(kwargs)
 
diff --git a/tests/unit/test__opentelemetry_tracing.py b/tests/unit/test__opentelemetry_tracing.py
index 25870227bf..a9a4ac0caf 100644
--- a/tests/unit/test__opentelemetry_tracing.py
+++ b/tests/unit/test__opentelemetry_tracing.py
@@ -12,8 +12,15 @@
 from google.api_core.exceptions import GoogleAPICallError
 from google.cloud.spanner_v1 import _opentelemetry_tracing
-from tests._helpers import OpenTelemetryBase, HAS_OPENTELEMETRY_INSTALLED
-
+from tests._helpers import (
+    OpenTelemetryBase,
+    StatusCode,
+    DB_SYSTEM,
+    DB_NAME,
+    DB_CONNECTION_STRING,
+    HAS_OPENTELEMETRY_INSTALLED,
+    NET_HOST_NAME,
+)
 
 
 def _make_rpc_error(error_cls, trailing_metadata=None):
     import grpc
@@ -51,14 +58,14 @@ class TestTracing(OpenTelemetryBase):
     def test_trace_call(self):
         extra_attributes = {
             "attribute1": "value1",
-            # Since our database is mocked, we have to override the db.instance parameter so it is a string
-            "db.instance": "database_name",
+            # Since our database is mocked, we have to override the DB_NAME parameter so it is a string
+            DB_NAME: "database_name",
         }
 
         expected_attributes = {
-            "db.type": "spanner",
-            "db.url": "spanner.googleapis.com",
-            "net.host.name": "spanner.googleapis.com",
+            DB_SYSTEM: "google.cloud.spanner",
+            DB_CONNECTION_STRING: "spanner.googleapis.com",
+            NET_HOST_NAME: "spanner.googleapis.com",
         }
         expected_attributes.update(extra_attributes)
 
@@ -78,13 +85,14 @@ def test_trace_call(self):
         self.assertEqual(span.status.status_code, StatusCode.OK)
 
     def test_trace_error(self):
-        extra_attributes = {"db.instance": "database_name"}
+        extra_attributes = {DB_NAME: "database_name"}
 
         expected_attributes = {
-            "db.type": "spanner",
-            "db.url": "spanner.googleapis.com",
-            "net.host.name": "spanner.googleapis.com",
+            DB_SYSTEM: "google.cloud.spanner",
+            DB_CONNECTION_STRING: "spanner.googleapis.com",
+            NET_HOST_NAME: "spanner.googleapis.com",
         }
+
         expected_attributes.update(extra_attributes)
 
         with self.assertRaises(GoogleAPICallError):
@@ -104,13 +112,14 @@ def test_trace_error(self):
         self.assertEqual(span.status.status_code, StatusCode.ERROR)
 
     def test_trace_grpc_error(self):
-        extra_attributes = {"db.instance": "database_name"}
+        extra_attributes = {DB_NAME: "database_name"}
 
         expected_attributes = {
-            "db.type": "spanner",
-            "db.url": "spanner.googleapis.com:443",
-            "net.host.name": "spanner.googleapis.com:443",
+            DB_SYSTEM: "google.cloud.spanner",
+            DB_CONNECTION_STRING: "spanner.googleapis.com",
+            NET_HOST_NAME: "spanner.googleapis.com",
         }
+
         expected_attributes.update(extra_attributes)
 
         with self.assertRaises(GoogleAPICallError):
@@ -127,13 +136,14 @@ def test_trace_grpc_error(self):
         self.assertEqual(span.status.status_code, StatusCode.ERROR)
 
     def test_trace_codeless_error(self):
-        extra_attributes = {"db.instance": "database_name"}
+        extra_attributes = {DB_NAME: "database_name"}
 
         expected_attributes = {
-            "db.type": "spanner",
-            "db.url": "spanner.googleapis.com:443",
-            "net.host.name": "spanner.googleapis.com:443",
+            DB_SYSTEM: "google.cloud.spanner",
+            DB_CONNECTION_STRING: "spanner.googleapis.com",
+            NET_HOST_NAME: "spanner.googleapis.com",
         }
+
         expected_attributes.update(extra_attributes)
 
         with self.assertRaises(GoogleAPICallError):
diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py
index ee96decf5e..85f0bb2aed 100644
--- a/tests/unit/test_batch.py
+++ b/tests/unit/test_batch.py
@@ -14,7 +14,14 @@
 
 import unittest
 
-from tests._helpers import OpenTelemetryBase, StatusCode
+from tests._helpers import (
+    OpenTelemetryBase,
+    StatusCode,
+    DB_SYSTEM,
+    DB_NAME,
+    DB_CONNECTION_STRING,
+    NET_HOST_NAME,
+)
 from google.cloud.spanner_v1 import RequestOptions
 
 TABLE_NAME = "citizens"
@@ -24,10 +31,10 @@
     ["bharney@example.com", "Bharney", "Rhubble", 31],
 ]
 BASE_ATTRIBUTES = {
-    "db.type": "spanner",
-    "db.url": "spanner.googleapis.com",
-    "db.instance": "testing",
-    "net.host.name": "spanner.googleapis.com",
+    DB_SYSTEM: "google.cloud.spanner",
+    DB_CONNECTION_STRING: "spanner.googleapis.com",
+    DB_NAME: "testing",
+    NET_HOST_NAME: "spanner.googleapis.com",
 }
 
 
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index d4052f0ae3..ee1f727868 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -20,6 +20,10 @@
     OpenTelemetryBase,
     StatusCode,
     HAS_OPENTELEMETRY_INSTALLED,
+    DB_SYSTEM,
+    DB_NAME,
+    DB_CONNECTION_STRING,
+    NET_HOST_NAME,
 )
 
 
@@ -46,12 +50,12 @@ class TestSession(OpenTelemetryBase):
     SESSION_NAME = DATABASE_NAME + "/sessions/" + SESSION_ID
     DATABASE_ROLE = "dummy-role"
     BASE_ATTRIBUTES = {
-        "db.type": "spanner",
-        "db.url": "spanner.googleapis.com",
-        "db.instance": DATABASE_NAME,
-        "net.host.name": "spanner.googleapis.com",
+        DB_SYSTEM: "google.cloud.spanner",
+        DB_CONNECTION_STRING: "spanner.googleapis.com",
+        DB_NAME: DATABASE_NAME,
+        NET_HOST_NAME: "spanner.googleapis.com",
     }
 
     def _getTargetClass(self):
         from google.cloud.spanner_v1.session import Session
 
"spanner.googleapis.com", } diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index d4052f0ae3..ee1f727868 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -20,6 +20,10 @@ OpenTelemetryBase, StatusCode, HAS_OPENTELEMETRY_INSTALLED, + DB_SYSTEM, + DB_NAME, + DB_CONNECTION_STRING, + NET_HOST_NAME, ) @@ -46,12 +50,11 @@ class TestSession(OpenTelemetryBase): SESSION_NAME = DATABASE_NAME + "/sessions/" + SESSION_ID DATABASE_ROLE = "dummy-role" BASE_ATTRIBUTES = { - "db.type": "spanner", - "db.url": "spanner.googleapis.com", - "db.instance": DATABASE_NAME, - "net.host.name": "spanner.googleapis.com", + DB_SYSTEM: "google.cloud.spanner", + DB_CONNECTION_STRING: "spanner.googleapis.com", + DB_NAME: DATABASE_NAME, + NET_HOST_NAME: "spanner.googleapis.com", } - def _getTargetClass(self): from google.cloud.spanner_v1.session import Session diff --git a/tests/unit/test_snapshot.py b/tests/unit/test_snapshot.py index bf5563dcfd..4f1a1e50ba 100644 --- a/tests/unit/test_snapshot.py +++ b/tests/unit/test_snapshot.py @@ -21,6 +21,12 @@ OpenTelemetryBase, StatusCode, HAS_OPENTELEMETRY_INSTALLED, + DB_STATEMENT, + DB_SYSTEM, + DB_NAME, + DB_CONNECTION_STRING, + EXTENDED_TRACING_EANBLED, + NET_HOST_NAME, ) from google.cloud.spanner_v1.param_types import INT64 from google.api_core.retry import Retry @@ -41,10 +47,10 @@ SECONDS = 3 MICROS = 123456 BASE_ATTRIBUTES = { - "db.type": "spanner", - "db.url": "spanner.googleapis.com", - "db.instance": "testing", - "net.host.name": "spanner.googleapis.com", + DB_SYSTEM: "google.cloud.spanner", + DB_CONNECTION_STRING: "spanner.googleapis.com", + DB_NAME: "testing", + NET_HOST_NAME: "spanner.googleapis.com", } DIRECTED_READ_OPTIONS = { "include_replicas": { @@ -531,10 +537,10 @@ def test_iteration_w_multiple_span_creation(self): self.assertEqual( dict(span.attributes), { - "db.type": "spanner", - "db.url": "spanner.googleapis.com", - "db.instance": "testing", - "net.host.name": "spanner.googleapis.com", + DB_SYSTEM: "google.cloud.spanner", + DB_CONNECTION_STRING: "spanner.googleapis.com", + DB_NAME: "testing", + NET_HOST_NAME: "spanner.googleapis.com", }, ) @@ -862,11 +868,12 @@ def test_execute_sql_other_error(self): self.assertEqual(derived._execute_sql_count, 1) - self.assertSpanAttributes( - "CloudSpanner.ReadWriteTransaction", - status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY}), - ) + if EXTENDED_TRACING_EANBLED: + self.assertSpanAttributes( + "CloudSpanner.ReadWriteTransaction", + status=StatusCode.ERROR, + attributes=dict(BASE_ATTRIBUTES, **{DB_STATEMENT: SQL_QUERY}), + ) def _execute_sql_helper( self, @@ -1021,7 +1028,7 @@ def _execute_sql_helper( self.assertSpanAttributes( "CloudSpanner.ReadWriteTransaction", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}), + attributes=dict(BASE_ATTRIBUTES, **{DB_STATEMENT: SQL_QUERY_WITH_PARAM}), ) def test_execute_sql_wo_multi_use(self): @@ -1366,7 +1373,7 @@ def _partition_query_helper( self.assertSpanAttributes( "CloudSpanner.PartitionReadWriteTransaction", status=StatusCode.OK, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": SQL_QUERY_WITH_PARAM}), + attributes=dict(BASE_ATTRIBUTES, **{DB_STATEMENT: SQL_QUERY_WITH_PARAM}), ) def test_partition_query_other_error(self): @@ -1384,7 +1391,7 @@ def test_partition_query_other_error(self): self.assertSpanAttributes( "CloudSpanner.PartitionReadWriteTransaction", status=StatusCode.ERROR, - attributes=dict(BASE_ATTRIBUTES, **{"db.statement": 
diff --git a/tests/unit/test_spanner.py b/tests/unit/test_spanner.py
index ab5479eb3c..a8d0969802 100644
--- a/tests/unit/test_spanner.py
+++ b/tests/unit/test_spanner.py
@@ -45,7 +45,13 @@
 
 from google.api_core import gapic_v1
 
-from tests._helpers import OpenTelemetryBase
+from tests._helpers import (
+    OpenTelemetryBase,
+    DB_SYSTEM,
+    DB_NAME,
+    DB_CONNECTION_STRING,
+    NET_HOST_NAME,
+)
 
 TABLE_NAME = "citizens"
 COLUMNS = ["email", "first_name", "last_name", "age"]
@@ -110,10 +116,10 @@ class TestTransaction(OpenTelemetryBase):
     TRANSACTION_TAG = "transaction-tag"
 
     BASE_ATTRIBUTES = {
-        "db.type": "spanner",
-        "db.url": "spanner.googleapis.com",
-        "db.instance": "testing",
-        "net.host.name": "spanner.googleapis.com",
+        DB_SYSTEM: "google.cloud.spanner",
+        DB_CONNECTION_STRING: "spanner.googleapis.com",
+        DB_NAME: "testing",
+        NET_HOST_NAME: "spanner.googleapis.com",
     }
 
     def _getTargetClass(self):
diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py
index b40ae8843f..589822a9f6 100644
--- a/tests/unit/test_transaction.py
+++ b/tests/unit/test_transaction.py
@@ -21,7 +21,14 @@
 from google.api_core.retry import Retry
 from google.api_core import gapic_v1
 
-from tests._helpers import OpenTelemetryBase, StatusCode
+from tests._helpers import (
+    OpenTelemetryBase,
+    StatusCode,
+    DB_SYSTEM,
+    DB_NAME,
+    DB_CONNECTION_STRING,
+    NET_HOST_NAME,
+)
 
 TABLE_NAME = "citizens"
 COLUMNS = ["email", "first_name", "last_name", "age"]
@@ -53,10 +60,10 @@ class TestTransaction(OpenTelemetryBase):
     TRANSACTION_TAG = "transaction-tag"
 
     BASE_ATTRIBUTES = {
-        "db.type": "spanner",
-        "db.url": "spanner.googleapis.com",
-        "db.instance": "testing",
-        "net.host.name": "spanner.googleapis.com",
+        DB_SYSTEM: "google.cloud.spanner",
+        DB_CONNECTION_STRING: "spanner.googleapis.com",
+        DB_NAME: "testing",
+        NET_HOST_NAME: "spanner.googleapis.com",
     }
 
     def _getTargetClass(self):