Hyphen in database name support (SDBM-1013) (#17775)
* Hyphen in database name support

* Add release note

* Fix linter errors

* Correct HA setup
nenadnoveljic authored Jun 11, 2024
1 parent 260a86d commit 2552879
Showing 16 changed files with 114 additions and 110 deletions.
1 change: 1 addition & 0 deletions sqlserver/changelog.d/17775.fixed
@@ -0,0 +1 @@
Hyphen in database name support
2 changes: 1 addition & 1 deletion sqlserver/datadog_checks/sqlserver/const.py
@@ -56,7 +56,7 @@
]

DATABASE_SERVICE_CHECK_QUERY = """SELECT 1;"""
SWITCH_DB_STATEMENT = """USE {};"""
SWITCH_DB_STATEMENT = """USE [{}];"""

VALID_METRIC_TYPES = ('gauge', 'rate', 'histogram')
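
Note: the constant above is the core of this change. Wrapping the formatted name in square brackets makes it a T-SQL delimited identifier, so database names containing a hyphen (which is not legal in a regular identifier) still parse. A minimal sketch of the behavior; the database name below is illustrative:

# Updated constant from const.py; the example name is illustrative.
SWITCH_DB_STATEMENT = """USE [{}];"""

# "USE datadog_test-1;" would be a T-SQL syntax error (the hyphen ends the
# identifier); the bracketed form is parsed as a single delimited identifier.
print(SWITCH_DB_STATEMENT.format("datadog_test-1"))  # -> USE [datadog_test-1];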

2 changes: 1 addition & 1 deletion sqlserver/tests/common.py
@@ -283,7 +283,7 @@ def assert_metrics(
tags = check_tags + ['database:{}'.format(dbname)]
for mname in DB_PERF_COUNT_METRICS_NAMES_SINGLE:
aggregator.assert_metric(mname, hostname=hostname, tags=tags)
if dbname == 'datadog_test' and is_always_on():
if dbname == 'datadog_test-1' and is_always_on():
for mname in DB_PERF_COUNT_METRICS_NAMES_AO:
aggregator.assert_metric(mname, hostname=hostname, tags=tags)
else:
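
Note that the brackets are only a T-SQL quoting mechanism; tags and reported database_name values in the tests keep the raw, unbracketed name. A small sketch based on the tag construction in common.py above:

# Sketch only: tags are built as 'database:{}'.format(dbname), so the hyphen
# flows through to the tag unchanged and no brackets appear in tag values.
dbname = 'datadog_test-1'
tags = ['database:{}'.format(dbname)]
assert tags == ['database:datadog_test-1']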
16 changes: 8 additions & 8 deletions sqlserver/tests/compose-ha/sql/aoag_primary.sql
@@ -20,7 +20,7 @@ CREATE USER fred FOR LOGIN fred;
GRANT CONNECT ANY DATABASE to fred;
GO

CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO

-- create an offline database to have an unavailable database to test with
@@ -38,10 +38,10 @@ GO

-- Create test database for integration tests
-- only bob and fred have read/write access to this database
USE datadog_test;
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
USE [datadog_test-1];
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
GO
@@ -184,10 +184,10 @@ USE [master]
GO

--change recovery model and take full backup for db to meet requirements of AOAG
ALTER DATABASE datadog_test SET RECOVERY FULL ;
ALTER DATABASE [datadog_test-1] SET RECOVERY FULL ;
GO

BACKUP DATABASE datadog_test TO DISK = N'/var/opt/mssql/backup/datadog_test.bak' WITH NOFORMAT, NOINIT, NAME = N'datadog_test-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
BACKUP DATABASE [datadog_test-1] TO DISK = N'/var/opt/mssql/backup/[datadog_test-1].bak' WITH NOFORMAT, NOINIT, NAME = N'[datadog_test-1]-Full Database Backup', SKIP, NOREWIND, NOUNLOAD, STATS = 10
GO


@@ -271,5 +271,5 @@ USE [master]
GO

WAITFOR DELAY '00:00:10'
ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test]
ALTER AVAILABILITY GROUP [AG1] ADD DATABASE [datadog_test-1]
GO
14 changes: 7 additions & 7 deletions sqlserver/tests/compose-high-cardinality-windows/setup.sql
@@ -33,16 +33,16 @@ GO
-- Create test database for integration tests
-- only bob and fred have read/write access to this database
-- the datadog user has only connect access but can't read any objects
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
-- we don't need to recreate the datadog user in this new DB because it already exists in the model
@@ -196,7 +196,7 @@ GO
------------------------------ HIGH CARDINALITY ENV SETUP ------------------------------

-- Table variables
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';

-- Create a main table which contains high cardinality data for testing.
@@ -227,7 +227,7 @@ BEGIN
DECLARE @col16_int INT = FLOOR(RAND() * 2500);
DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);

INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);

SET @row_count = @row_count + 1
END;
14 changes: 7 additions & 7 deletions sqlserver/tests/compose-high-cardinality/setup.sql
@@ -125,9 +125,9 @@ GO

-- Create test database for integration tests.
-- Only bob and fred have read/write access to this database.
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

CREATE USER bob FOR LOGIN bob;
@@ -174,12 +174,12 @@ GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);

-- Table variables
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE datadog_test.dbo.'
DECLARE @table_prefix VARCHAR(100) = 'CREATE TABLE [datadog_test-1].dbo.'
DECLARE @table_columns VARCHAR(500) = ' (id INT NOT NULL IDENTITY, col1_txt TEXT, col2_txt TEXT, col3_txt TEXT, col4_txt TEXT, col5_txt TEXT, col6_txt TEXT, col7_txt TEXT, col8_txt TEXT, col9_txt TEXT, col10_txt TEXT, col11_float FLOAT, col12_float FLOAT, col13_float FLOAT, col14_int INT, col15_int INT, col16_int INT, col17_date DATE, PRIMARY KEY(id));';

-- Create a main table which contains high cardinality data for testing.
@@ -210,7 +210,7 @@ BEGIN
DECLARE @col16_int INT = FLOOR(RAND() * 2500);
DECLARE @col17_date DATE = CAST(CAST(RAND()*100000 AS INT) AS DATETIME);

INSERT INTO datadog_test.dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);
INSERT INTO [datadog_test-1].dbo.high_cardinality (col1_txt, col2_txt, col3_txt, col4_txt, col5_txt, col6_txt, col7_txt, col8_txt, col9_txt, col10_txt, col11_float, col12_float, col13_float, col14_int, col15_int, col16_int, col17_date) VALUES (@col1_txt, @col2_txt, @col3_txt, @col4_txt, @col5_txt, @col6_txt, @col7_txt, @col8_txt, @col9_txt, @col10_txt, @col11_float, @col12_float, @col13_float, @col14_int, @col15_int, @col16_int, @col17_date);

SET @row_count = @row_count + 1
END;
10 changes: 5 additions & 5 deletions sqlserver/tests/compose-windows/setup.sql
@@ -33,16 +33,16 @@ GO
-- Create test database for integration tests
-- only bob and fred have read/write access to this database
-- the datadog user has only connect access but can't read any objects
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
GO

-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
-- we don't need to recreate the datadog user in this new DB because it already exists in the model
10 changes: 5 additions & 5 deletions sqlserver/tests/compose/setup.sql
@@ -17,16 +17,16 @@ GO

-- Create test database for integration tests
-- only bob and fred have read/write access to this database
CREATE DATABASE datadog_test;
CREATE DATABASE [datadog_test-1];
GO
USE datadog_test;
USE [datadog_test-1];
-- This table is pronounced "things" except we've replaced "th" with the greek lower case "theta" to ensure we
-- correctly support unicode throughout the integration.
CREATE TABLE datadog_test.dbo.ϑings (id int, name varchar(255));
INSERT INTO datadog_test.dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE TABLE [datadog_test-1].dbo.ϑings (id int, name varchar(255));
INSERT INTO [datadog_test-1].dbo.ϑings VALUES (1, 'foo'), (2, 'bar');
CREATE USER bob FOR LOGIN bob;
CREATE USER fred FOR LOGIN fred;
CREATE CLUSTERED INDEX thingsindex ON datadog_test.dbo.ϑings (name);
CREATE CLUSTERED INDEX thingsindex ON [datadog_test-1].dbo.ϑings (name);
GO

EXEC sp_addrolemember 'db_datareader', 'bob'
3 changes: 2 additions & 1 deletion sqlserver/tests/conftest.py
@@ -13,6 +13,7 @@
from datadog_checks.dev import WaitFor, docker_run
from datadog_checks.dev.conditions import CheckDockerLogs
from datadog_checks.dev.docker import using_windows_containers
from datadog_checks.sqlserver.const import SWITCH_DB_STATEMENT

from .common import (
DOCKER_SERVER,
@@ -198,7 +199,7 @@ def execute_with_retries(self, query, params=(), database=None, retries=3, sleep
logging.info("executing query with retries. query='%s' params=%s attempt=%s", query, params, attempt)
with self.conn.cursor() as cursor:
if database:
cursor.execute("USE {}".format(database))
cursor.execute(SWITCH_DB_STATEMENT.format(database))
cursor.execute(query, params)
if return_result:
return cursor.fetchall()
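
With this change the test helper formats its USE statement through the shared SWITCH_DB_STATEMENT constant rather than building it inline. One caveat, sketched below as a hypothetical helper that is not part of this commit: a bracket-delimited identifier must double any closing bracket inside the name, so a general-purpose quoting function would look roughly like this.

# Hypothetical helper, not part of this commit: T-SQL delimited identifiers
# require a literal ']' inside the name to be doubled before wrapping.
def quote_sqlserver_identifier(name):
    return '[{}]'.format(name.replace(']', ']]'))

assert quote_sqlserver_identifier('datadog_test-1') == '[datadog_test-1]'
assert quote_sqlserver_identifier('weird]name') == '[weird]]name]'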
20 changes: 10 additions & 10 deletions sqlserver/tests/test_activity.py
Original file line number Diff line number Diff line change
@@ -64,14 +64,14 @@ def dbm_instance(instance_docker):
"database,query,match_pattern,is_proc,expected_comments",
[
[
"datadog_test",
"datadog_test-1",
"/*test=foo*/ SELECT * FROM ϑings",
r"SELECT \* FROM ϑings",
False,
["/*test=foo*/"],
],
[
"datadog_test",
"datadog_test-1",
"EXEC bobProc",
r"SELECT \* FROM ϑings",
True,
@@ -98,7 +98,7 @@ def test_collect_load_activity(

def run_test_query(c, q):
cur = c.cursor()
cur.execute("USE {}".format(database))
cur.execute("USE [{}]".format(database))
# 0xFF can't be decoded to Unicode, which makes it good test data,
# since Unicode is a default format
cur.execute("SET CONTEXT_INFO 0xff")
@@ -166,7 +166,7 @@ def run_test_query(c, q):
assert blocked_row['procedure_signature'], "missing procedure signature"
assert blocked_row['procedure_name'], "missing procedure name"
assert re.match(match_pattern, blocked_row['text'], re.IGNORECASE), "incorrect blocked query"
assert blocked_row['database_name'] == "datadog_test", "incorrect database_name"
assert blocked_row['database_name'] == "datadog_test-1", "incorrect database_name"
assert blocked_row['context_info'] == "ff", "incorrect context_info"
assert blocked_row['id'], "missing session id"
assert blocked_row['now'], "missing current timestamp"
@@ -255,7 +255,7 @@ def test_activity_nested_blocking_transactions(

def run_queries(conn, queries):
cur = conn.cursor()
cur.execute("USE {}".format("datadog_test"))
cur.execute("USE [{}]".format("datadog_test-1"))
cur.execute("BEGIN TRANSACTION")
for q in queries:
try:
@@ -307,7 +307,7 @@ def run_queries(conn, queries):
# associated sys.dm_exec_requests.
assert root_blocker["user_name"] == "fred"
assert root_blocker["session_status"] == "sleeping"
assert root_blocker["database_name"] == "datadog_test"
assert root_blocker["database_name"] == "datadog_test-1"
assert root_blocker["last_request_start_time"]
assert root_blocker["client_port"]
assert root_blocker["client_address"]
@@ -329,7 +329,7 @@ def run_queries(conn, queries):
assert tx3["session_status"] == "running"
# verify other essential fields are present
assert tx2["user_name"] == "bob"
assert tx2["database_name"] == "datadog_test"
assert tx2["database_name"] == "datadog_test-1"
assert tx2["last_request_start_time"]
assert tx2["client_port"]
assert tx2["client_address"]
@@ -341,7 +341,7 @@ def run_queries(conn, queries):
assert tx2["query_plan_hash"]

assert tx3["user_name"] == "fred"
assert tx3["database_name"] == "datadog_test"
assert tx3["database_name"] == "datadog_test-1"
assert tx3["last_request_start_time"]
assert tx3["client_port"]
assert tx3["client_address"]
@@ -392,7 +392,7 @@ def test_activity_metadata(

def _run_test_query(conn, q):
cur = conn.cursor()
cur.execute("USE {}".format("datadog_test"))
cur.execute("USE [{}]".format("datadog_test-1"))
cur.execute(q)

def _obfuscate_sql(sql_query, options=None):
@@ -647,7 +647,7 @@ def _obfuscate_sql(sql_query, options=None):

def run_test_query(c, q):
cur = c.cursor()
cur.execute("USE datadog_test")
cur.execute("USE [datadog_test-1]")
cur.execute(q)

run_test_query(fred_conn, "EXEC procedureWithLargeCommment")
10 changes: 5 additions & 5 deletions sqlserver/tests/test_database_metrics.py
@@ -24,7 +24,7 @@
SQLSERVER_MAJOR_VERSION,
)

AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test']
AUTODISCOVERY_DBS = ['master', 'msdb', 'datadog_test-1']

STATIC_SERVER_INFO = {
STATIC_INFO_MAJOR_VERSION: SQLSERVER_MAJOR_VERSION,
@@ -61,8 +61,8 @@ def test_sqlserver_index_usage_metrics(
('msdb', 'PK__backupse__21F79AAB9439648C', 'backupset', 0, 1, 0, 0),
],
[
('datadog_test', 'idx_something', 'some_table', 10, 60, 12, 18),
('datadog_test', 'idx_something_else', 'some_table', 20, 30, 40, 50),
('datadog_test-1', 'idx_something', 'some_table', 10, 60, 12, 18),
('datadog_test-1', 'idx_something_else', 'some_table', 20, 30, 40, 50),
],
]
mocked_results_tempdb = [
@@ -153,7 +153,7 @@ def test_sqlserver_db_fragmentation_metrics(
('msdb', 'syscachedcredentials', 1, 'PK__syscache__F6D56B562DA81DC6', 0, 0.0, 0, 0.0),
('msdb', 'syscollector_blobs_internal', 1, 'PK_syscollector_blobs_internal_paremeter_name', 0, 0.0, 0, 0.0),
],
[('datadog_test', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)],
[('datadog_test-1', 'ϑings', 1, 'thingsindex', 1, 1.0, 1, 0.0)],
]
mocked_results_tempdb = [
[('tempdb', '#TempExample__000000000008', 1, 'PK__#TempExa__3214EC278A26D67E', 1, 1.0, 1, 0.0)],
@@ -250,7 +250,7 @@ def test_sqlserver_database_backup_metrics(
('model', 'model', 2),
('msdb', 'msdb', 0),
('tempdb', 'tempdb', 0),
('datadog_test', 'datadog_test', 10),
('datadog_test-1', 'datadog_test-1', 10),
]

sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker_metrics])