From 302e263b8bfa268e4272acfdbc7f5c4e26e2ea9a Mon Sep 17 00:00:00 2001
From: Eden Gorevoy
Date: Wed, 10 Sep 2025 15:57:54 -0400
Subject: [PATCH 1/5] Enable only_custom_queries for sqlserver

---
 sqlserver/changelog.d/21304.fixed            |   1 +
 sqlserver/datadog_checks/sqlserver/config.py |   1 +
 .../datadog_checks/sqlserver/sqlserver.py    | 108 ++++++++++--------
 sqlserver/tests/test_integration.py          |  53 +++++++++
 4 files changed, 113 insertions(+), 50 deletions(-)
 create mode 100644 sqlserver/changelog.d/21304.fixed

diff --git a/sqlserver/changelog.d/21304.fixed b/sqlserver/changelog.d/21304.fixed
new file mode 100644
index 0000000000000..6715225d47b1b
--- /dev/null
+++ b/sqlserver/changelog.d/21304.fixed
@@ -0,0 +1 @@
+Support `only_custom_queries` configuration option in sqlserver integration.
diff --git a/sqlserver/datadog_checks/sqlserver/config.py b/sqlserver/datadog_checks/sqlserver/config.py
index 6bdf3fc38dbc0..ecbb09d6d2891 100644
--- a/sqlserver/datadog_checks/sqlserver/config.py
+++ b/sqlserver/datadog_checks/sqlserver/config.py
@@ -40,6 +40,7 @@ def __init__(self, init_config, instance, log):

         self.proc: str = instance.get('stored_procedure')
         self.custom_metrics: list[dict] = init_config.get('custom_metrics', []) or []
+        self.only_custom_queries: bool = is_affirmative(instance.get('only_custom_queries', False))
         self.ignore_missing_database = is_affirmative(instance.get("ignore_missing_database", False))
         if self.ignore_missing_database:
             self.log.warning(
diff --git a/sqlserver/datadog_checks/sqlserver/sqlserver.py b/sqlserver/datadog_checks/sqlserver/sqlserver.py
index 5fe57cdd36011..616436572fa5e 100644
--- a/sqlserver/datadog_checks/sqlserver/sqlserver.py
+++ b/sqlserver/datadog_checks/sqlserver/sqlserver.py
@@ -831,10 +831,12 @@ def check(self, _):
                 )
                 self._query_manager.compile_queries()
             self._send_database_instance_metadata()
+
             if self._config.proc:
                 self.do_stored_procedure_check()
             else:
                 self.collect_metrics()
+
             if self._config.autodiscovery and self._config.autodiscovery_db_service_check:
                 self._check_database_conns()
             if self._config.dbm_enabled:
@@ -852,6 +854,7 @@ def check(self, _):
                         handler.run_job_loop(self.tag_manager.get_tags())
                     except Exception as e:
                         self.log.error("Error running XE session handler for %s: %s", handler.session_name, e)
+
         else:
             self.log.debug("Skipping check")

@@ -926,52 +929,56 @@ def log_missing_metric(self, metric_name, major_version, engine_version):
         else:
             self.log.warning("%s metrics are not supported on Azure engine version: %s", metric_name, engine_version)

-    def collect_metrics(self):
-        """Fetch the metrics from all the associated database tables."""
+    # queries for default integration metrics from the database
+    def load_basic_metrics(self, cursor):
+        # initiate autodiscovery or if the server was down at check __init__ key could be missing.
+        if self.autodiscover_databases(cursor) or not self.instance_metrics:
+            self._make_metric_list_to_collect(self._config.custom_metrics)
+
+        instance_results = {}
+        engine_edition = self.static_info_cache.get(STATIC_INFO_ENGINE_EDITION, "")
+        # Execute the `fetch_all` operations first to minimize the database calls
+        for cls, metric_names in self.instance_per_type_metrics.items():
+            if not metric_names:
+                instance_results[cls] = None, None
+            else:
+                try:
+                    db_names = [d.name for d in self.databases] or [
+                        self.instance.get("database", self.connection.DEFAULT_DATABASE)
+                    ]
+                    metric_cls = getattr(metrics, cls)
+                    with tracked_query(self, operation=metric_cls.OPERATION_NAME):
+                        rows, cols = metric_cls.fetch_all_values(
+                            cursor,
+                            list(metric_names),
+                            self.log,
+                            databases=db_names,
+                            engine_edition=engine_edition,
+                        )
+                except Exception as e:
+                    self.log.error("Error running `fetch_all` for metrics %s - skipping. Error: %s", cls, e)
+                    rows, cols = None, None

-        with self.connection.open_managed_default_connection():
-            with self.connection.get_managed_cursor() as cursor:
-                # initiate autodiscovery or if the server was down at check __init__ key could be missing.
-                if self.autodiscover_databases(cursor) or not self.instance_metrics:
-                    self._make_metric_list_to_collect(self._config.custom_metrics)
+                instance_results[cls] = rows, cols

-                instance_results = {}
-                engine_edition = self.static_info_cache.get(STATIC_INFO_ENGINE_EDITION, "")
-                # Execute the `fetch_all` operations first to minimize the database calls
-                for cls, metric_names in self.instance_per_type_metrics.items():
-                    if not metric_names:
-                        instance_results[cls] = None, None
+        for metric in self.instance_metrics:
+            key = metric.__class__.__name__
+            if key not in instance_results:
+                self.log.warning("No %s metrics found, skipping", str(key))
+            else:
+                rows, cols = instance_results[key]
+                if rows is not None:
+                    if key == "SqlIncrFractionMetric":
+                        metric.fetch_metric(rows, cols, self.sqlserver_incr_fraction_metric_previous_values)
                     else:
-                        try:
-                            db_names = [d.name for d in self.databases] or [
-                                self.instance.get("database", self.connection.DEFAULT_DATABASE)
-                            ]
-                            metric_cls = getattr(metrics, cls)
-                            with tracked_query(self, operation=metric_cls.OPERATION_NAME):
-                                rows, cols = metric_cls.fetch_all_values(
-                                    cursor,
-                                    list(metric_names),
-                                    self.log,
-                                    databases=db_names,
-                                    engine_edition=engine_edition,
-                                )
-                        except Exception as e:
-                            self.log.error("Error running `fetch_all` for metrics %s - skipping. Error: %s", cls, e)
-                            rows, cols = None, None
-
-                        instance_results[cls] = rows, cols
+                        metric.fetch_metric(rows, cols)

-                for metric in self.instance_metrics:
-                    key = metric.__class__.__name__
-                    if key not in instance_results:
-                        self.log.warning("No %s metrics found, skipping", str(key))
-                    else:
-                        rows, cols = instance_results[key]
-                        if rows is not None:
-                            if key == "SqlIncrFractionMetric":
-                                metric.fetch_metric(rows, cols, self.sqlserver_incr_fraction_metric_previous_values)
-                            else:
-                                metric.fetch_metric(rows, cols)
+    def collect_metrics(self):
+        """Fetch the metrics from all the associated database tables."""
+        with self.connection.open_managed_default_connection():
+            if not self._config.only_custom_queries:
+                with self.connection.get_managed_cursor() as cursor:
+                    self.load_basic_metrics(cursor)

         # Neither pyodbc nor adodbapi are able to read results of a query if the number of rows affected
         # statement are returned as part of the result set, so we disable for the entire connection
@@ -980,14 +987,15 @@ def collect_metrics(self):
             with self.connection.get_managed_cursor() as cursor:
                 cursor.execute("SET NOCOUNT ON")
             try:
-                # restore the current database after executing dynamic queries
-                # this is to ensure the current database context is not changed
-                with self.connection.restore_current_database_context():
-                    if self.database_metrics:
-                        for database_metric in self.database_metrics:
-                            database_metric.execute()
-
-                # reuse connection for any custom queries
+                if not self._config.only_custom_queries:
+                    # restore the current database after executing dynamic queries
+                    # this is to ensure the current database context is not changed
+                    with self.connection.restore_current_database_context():
+                        if self.database_metrics:
+                            for database_metric in self.database_metrics:
+                                database_metric.execute()
+
+                # reuse the connection for custom queries
                 self._query_manager.execute()
             finally:
                 with self.connection.get_managed_cursor() as cursor:
diff --git a/sqlserver/tests/test_integration.py b/sqlserver/tests/test_integration.py
index e8125652d1d70..93eea8dc417cf 100644
--- a/sqlserver/tests/test_integration.py
+++ b/sqlserver/tests/test_integration.py
@@ -506,6 +506,59 @@ def test_custom_queries(aggregator, dd_run_check, instance_docker, custom_query,
         aggregator.assert_metric(metric_name, **kwargs)


+@pytest.mark.integration
+@pytest.mark.usefixtures('dd_environment')
+@pytest.mark.parametrize(
+    "custom_query, assert_metrics",
+    [
+        (
+            {
+                'query': "SELECT letter, num FROM (VALUES (97, 'a'), (98, 'b')) AS t (num,letter)",
+                'columns': [{'name': 'customtag', 'type': 'tag'}, {'name': 'num', 'type': 'gauge'}],
+                'tags': ['query:custom'],
+            },
+            [
+                ("sqlserver.num", {"value": 97, "tags": ["customtag:a", "query:custom"]}),
+                ("sqlserver.num", {"value": 98, "tags": ["customtag:b", "query:custom"]}),
+            ],
+        ),
+        (
+            {
+                'query': "EXEC exampleProcWithoutNocount",
+                'columns': [{'name': 'value', 'type': 'gauge'}],
+                'tags': ['hello:there'],
+            },
+            [
+                ("sqlserver.value", {"value": 1, "tags": ["hello:there"]}),
+            ],
+        ),
+    ],
+)
+def test_custom_queries_only(aggregator, dd_run_check, instance_docker, custom_query, assert_metrics):
+    instance = copy(instance_docker)
+    instance['custom_queries'] = [custom_query]
+    instance['only_custom_queries'] = True
+    instance['procedure_metrics'] = {'enabled': False}
+
+    check = SQLServer(CHECK_NAME, {}, [instance])
+    dd_run_check(check)
+
+    for metric_name, kwargs in assert_metrics:
+        kwargs = copy(kwargs)
+        kwargs['tags'] = (
+            check._config.tags
+            + [
+                "database_hostname:{}".format("stubbed.hostname"),
+                "database_instance:{}".format("stubbed.hostname"),
+                "ddagenthostname:{}".format("stubbed.hostname"),
+                "dd.internal.resource:database_instance:{}".format("stubbed.hostname"),
+                "sqlserver_servername:{}".format(check.static_info_cache.get(STATIC_INFO_SERVERNAME)),
+            ]
+            + kwargs.get('tags', [])
+        )
+        aggregator.assert_metric(metric_name, **kwargs)
+
+
 @pytest.mark.integration
 @pytest.mark.usefixtures('dd_environment')
 def test_load_static_information(aggregator, dd_run_check, instance_docker):

From 908576679d89ba907a3f1f680dc5759cb8f927f8 Mon Sep 17 00:00:00 2001
From: Eden Gorevoy
Date: Wed, 10 Sep 2025 17:18:09 -0400
Subject: [PATCH 2/5] Add test

---
 sqlserver/tests/test_integration.py | 66 +++++++++++------------------
 1 file changed, 24 insertions(+), 42 deletions(-)

diff --git a/sqlserver/tests/test_integration.py b/sqlserver/tests/test_integration.py
index 93eea8dc417cf..df77096ce84ec 100644
--- a/sqlserver/tests/test_integration.py
+++ b/sqlserver/tests/test_integration.py
@@ -508,55 +508,37 @@ def test_custom_queries(aggregator, dd_run_check, instance_docker, custom_query,

 @pytest.mark.integration
 @pytest.mark.usefixtures('dd_environment')
-@pytest.mark.parametrize(
-    "custom_query, assert_metrics",
-    [
-        (
-            {
-                'query': "SELECT letter, num FROM (VALUES (97, 'a'), (98, 'b')) AS t (num,letter)",
-                'columns': [{'name': 'customtag', 'type': 'tag'}, {'name': 'num', 'type': 'gauge'}],
-                'tags': ['query:custom'],
-            },
-            [
-                ("sqlserver.num", {"value": 97, "tags": ["customtag:a", "query:custom"]}),
-                ("sqlserver.num", {"value": 98, "tags": ["customtag:b", "query:custom"]}),
-            ],
-        ),
-        (
-            {
-                'query': "EXEC exampleProcWithoutNocount",
-                'columns': [{'name': 'value', 'type': 'gauge'}],
-                'tags': ['hello:there'],
-            },
-            [
-                ("sqlserver.value", {"value": 1, "tags": ["hello:there"]}),
-            ],
-        ),
-    ],
-)
-def test_custom_queries_only(aggregator, dd_run_check, instance_docker, custom_query, assert_metrics):
+def test_only_custom_queries(aggregator, dd_run_check, instance_docker):
+    """Test that only_custom_queries=True skips regular metrics but executes custom queries."""
     instance = copy(instance_docker)
-    instance['custom_queries'] = [custom_query]
     instance['only_custom_queries'] = True
+    instance['custom_queries'] = [
+        {
+            'query': "SELECT 42 as custom_value",
+            'columns': [{'name': 'custom_value', 'type': 'gauge'}],
+            'tags': ['test:only_custom'],
+        }
+    ]
     instance['procedure_metrics'] = {'enabled': False}

     check = SQLServer(CHECK_NAME, {}, [instance])
     dd_run_check(check)

-    for metric_name, kwargs in assert_metrics:
-        kwargs = copy(kwargs)
-        kwargs['tags'] = (
-            check._config.tags
-            + [
-                "database_hostname:{}".format("stubbed.hostname"),
-                "database_instance:{}".format("stubbed.hostname"),
-                "ddagenthostname:{}".format("stubbed.hostname"),
-                "dd.internal.resource:database_instance:{}".format("stubbed.hostname"),
-                "sqlserver_servername:{}".format(check.static_info_cache.get(STATIC_INFO_SERVERNAME)),
-            ]
-            + kwargs.get('tags', [])
-        )
-        aggregator.assert_metric(metric_name, **kwargs)
+    # Verify that regular integration metrics are NOT collected
+    # (These would normally be collected by default)
+    aggregator.assert_metric('sqlserver.cache.hit_ratio', count=0)
+    aggregator.assert_metric('sqlserver.broker_activation.tasks_running', count=0)
+
+    # Verify that custom query metrics ARE collected
+    instance_tags = check._config.tags + [
+        "database_hostname:{}".format("stubbed.hostname"),
+        "database_instance:{}".format("stubbed.hostname"),
+        "ddagenthostname:{}".format("stubbed.hostname"),
+        "dd.internal.resource:database_instance:{}".format("stubbed.hostname"),
+        "sqlserver_servername:{}".format(check.static_info_cache.get(STATIC_INFO_SERVERNAME)),
+    ]
+
+    aggregator.assert_metric('sqlserver.custom_value', value=42, tags=instance_tags + ['test:only_custom'], count=1)


 @pytest.mark.integration

From b099de0759691927e8e9793526582010ea99f197 Mon Sep 17 00:00:00 2001
From: Eden Gorevoy
Date: Thu, 11 Sep 2025 17:43:55 -0400
Subject: [PATCH 3/5] Add validation and validation tests

---
 sqlserver/datadog_checks/sqlserver/config.py |  20 +++
 sqlserver/tests/test_unit.py                 | 139 +++++++++++++++++++
 2 files changed, 159 insertions(+)

diff --git a/sqlserver/datadog_checks/sqlserver/config.py b/sqlserver/datadog_checks/sqlserver/config.py
index ecbb09d6d2891..6b0e040883c86 100644
--- a/sqlserver/datadog_checks/sqlserver/config.py
+++ b/sqlserver/datadog_checks/sqlserver/config.py
@@ -133,6 +133,8 @@ def __init__(self, init_config, instance, log):
             additional_tags=["raw_query_statement:enabled"] if self.collect_raw_query_statement["enabled"] else [],
         )

+        self._validate_only_custom_queries(instance)
+
     def _compile_valid_patterns(self, patterns: list[str]) -> re.Pattern:
         valid_patterns = []

@@ -272,3 +274,21 @@ def _build_database_metrics_configs(self, instance):
             if value is not None:
                 config[key] = value
         return configurable_metrics
+
+    def _validate_only_custom_queries(self, instance):
+        # Warn about any metric-collecting options that are enabled
+        if self.only_custom_queries:
+            if self.dbm_enabled:
+                self.log.warning(
+                    "only_custom_queries is enabled with DBM if you don't want to collect DBM metrics, set dbm: false"
+                )
+
+            if self.proc:
+                self.log.warning(
+                    "only_custom_queries is enabled with stored_procedure "
+                    "stored_procedure is deprecated. if you don't want to collect "
+                    "stored_procedure metrics, remove that config option"
+                )
+
+            if instance.get('custom_queries', []) == []:
+                self.log.warning("only_custom_queries is enabled but no custom queries are defined")
diff --git a/sqlserver/tests/test_unit.py b/sqlserver/tests/test_unit.py
index 1bb0c039d15fc..b318cd4d853f6 100644
--- a/sqlserver/tests/test_unit.py
+++ b/sqlserver/tests/test_unit.py
@@ -3,6 +3,7 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import copy
 import json
+import logging
 import os
 import re
 import time
@@ -925,3 +926,141 @@ def test_database_identifier(instance_docker, template, expected, tags):
     check._database_identifier = None

     assert check.database_identifier == expected
+
+
+def test_only_custom_queries_validation_warnings(caplog):
+    """Test that appropriate warning logs are emitted when only_custom_queries conflicts with other configurations."""
+    from datadog_checks.sqlserver.config import SQLServerConfig
+
+    caplog.clear()
+    caplog.set_level(logging.WARNING)
+
+    # Use a real logger that will be captured by caplog
+    real_logger = logging.getLogger('test_sqlserver_config')
+
+    # Test case 1: only_custom_queries with DBM enabled
+    instance_with_dbm = {
+        'host': 'localhost',
+        'username': 'test',
+        'password': 'test',
+        'only_custom_queries': True,
+        'dbm': True,
+        'custom_queries': [
+            {
+                'query': "SELECT 1 as test_value",
+                'columns': [{'name': 'test_value', 'type': 'gauge'}],
+                'tags': ['test:dbm_warning'],
+            }
+        ],
+    }
+
+    config = SQLServerConfig({}, instance_with_dbm, real_logger)
+    config._validate_only_custom_queries(instance_with_dbm)
+
+    # Check for DBM warning
+    dbm_warning_found = any("only_custom_queries is enabled with DBM" in record.message for record in caplog.records)
+    assert dbm_warning_found, "Expected warning about only_custom_queries with DBM not found"
+
+    # Test case 2: only_custom_queries with stored procedure
+    caplog.clear()
+    instance_with_proc = {
+        'host': 'localhost',
+        'username': 'test',
+        'password': 'test',
+        'only_custom_queries': True,
+        'stored_procedure': 'pyStoredProc',
+        'custom_queries': [
+            {
+                'query': "SELECT 1 as test_value",
+                'columns': [{'name': 'test_value', 'type': 'gauge'}],
+                'tags': ['test:proc_warning'],
+            }
+        ],
+    }
+
+    config = SQLServerConfig({}, instance_with_proc, real_logger)
+    config._validate_only_custom_queries(instance_with_proc)
+
+    # Check for stored procedure warning
+    proc_warning_found = any(
+        "only_custom_queries is enabled with stored_procedure" in record.message for record in caplog.records
+    )
+    assert proc_warning_found, "Expected warning about only_custom_queries with stored_procedure not found"
+
+    # Test case 3: only_custom_queries with no custom queries defined
+    caplog.clear()
+    instance_no_queries = {
+        'host': 'localhost',
+        'username': 'test',
+        'password': 'test',
+        'only_custom_queries': True,
+        'custom_queries': [],
+    }
+
+    config = SQLServerConfig({}, instance_no_queries, real_logger)
+    config._validate_only_custom_queries(instance_no_queries)
+
+    # Check for no custom queries warning
+    no_queries_warning_found = any(
+        "only_custom_queries is enabled but no custom queries are defined" in record.message
+        for record in caplog.records
+    )
+    assert no_queries_warning_found, "Expected warning about only_custom_queries with no custom queries not found"
+
+    # Test case 4: only_custom_queries with all conflicts (should emit all warnings)
+    caplog.clear()
+    instance_all_conflicts = {
+        'host': 'localhost',
+        'username': 'test',
+        'password': 'test',
+        'only_custom_queries': True,
+        'dbm': True,
+        'stored_procedure': 'pyStoredProc',
+        'custom_queries': [],
+    }
+
+    config = SQLServerConfig({}, instance_all_conflicts, real_logger)
+    config._validate_only_custom_queries(instance_all_conflicts)
+
+    # Check that all three warnings are emitted
+    dbm_warning_found = any("only_custom_queries is enabled with DBM" in record.message for record in caplog.records)
+    proc_warning_found = any(
+        "only_custom_queries is enabled with stored_procedure" in record.message for record in caplog.records
+    )
+    no_queries_warning_found = any(
+        "only_custom_queries is enabled but no custom queries are defined" in record.message
+        for record in caplog.records
+    )
+
+    assert dbm_warning_found, "Expected warning about only_custom_queries with DBM not found in all-conflicts test"
+    assert proc_warning_found, (
+        "Expected warning about only_custom_queries with stored_procedure not found in all-conflicts test"
+    )
+    assert no_queries_warning_found, (
+        "Expected warning about only_custom_queries with no custom queries not found in all-conflicts test"
+    )
+
+    # Test case 5: only_custom_queries with no conflicts (should emit no warnings)
+    caplog.clear()
+    instance_no_conflicts = {
+        'host': 'localhost',
+        'username': 'test',
+        'password': 'test',
+        'only_custom_queries': True,
+        'dbm': False,
+        'custom_queries': [
+            {
+                'query': "SELECT 1 as test_value",
+                'columns': [{'name': 'test_value', 'type': 'gauge'}],
+                'tags': ['test:no_conflicts'],
+            }
+        ],
+    }
+
+    config = SQLServerConfig({}, instance_no_conflicts, real_logger)
+    config._validate_only_custom_queries(instance_no_conflicts)
+
+    # Check that no warnings are emitted
+    warning_count = len([record for record in caplog.records if record.levelno >= logging.WARNING])
+    warning_messages = [record.message for record in caplog.records if record.levelno >= logging.WARNING]
+    assert warning_count == 0, f"Expected no warnings but found {warning_count} warnings: {warning_messages}"

From fcb8df2df908927d6337380226cbd0694121aae6 Mon Sep 17 00:00:00 2001
From: Eden Gorevoy
Date: Mon, 15 Sep 2025 10:03:34 -0400
Subject: [PATCH 4/5] Update warnings, fix integration test metric assertion

---
 sqlserver/datadog_checks/sqlserver/config.py | 6 ++----
 sqlserver/tests/test_integration.py          | 8 ++------
 sqlserver/tests/test_unit.py                 | 2 +-
 3 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/sqlserver/datadog_checks/sqlserver/config.py b/sqlserver/datadog_checks/sqlserver/config.py
index 6b0e040883c86..2d6982b95779d 100644
--- a/sqlserver/datadog_checks/sqlserver/config.py
+++ b/sqlserver/datadog_checks/sqlserver/config.py
@@ -280,14 +280,12 @@ def _validate_only_custom_queries(self, instance):
         if self.only_custom_queries:
             if self.dbm_enabled:
                 self.log.warning(
-                    "only_custom_queries is enabled with DBM if you don't want to collect DBM metrics, set dbm: false"
+                    "only_custom_queries is enabled with DBM. if you don't want to collect DBM metrics, set dbm: false"
                 )

             if self.proc:
                 self.log.warning(
-                    "only_custom_queries is enabled with stored_procedure "
-                    "stored_procedure is deprecated. if you don't want to collect "
-                    "stored_procedure metrics, remove that config option"
+                    "`stored_procedure` is deprecated. to run custom queries, add to the `custom_queries` configuration"
                 )

             if instance.get('custom_queries', []) == []:
diff --git a/sqlserver/tests/test_integration.py b/sqlserver/tests/test_integration.py
index df77096ce84ec..8411612c7b90b 100644
--- a/sqlserver/tests/test_integration.py
+++ b/sqlserver/tests/test_integration.py
@@ -524,11 +524,6 @@ def test_only_custom_queries(aggregator, dd_run_check, instance_docker):
     check = SQLServer(CHECK_NAME, {}, [instance])
     dd_run_check(check)

-    # Verify that regular integration metrics are NOT collected
-    # (These would normally be collected by default)
-    aggregator.assert_metric('sqlserver.cache.hit_ratio', count=0)
-    aggregator.assert_metric('sqlserver.broker_activation.tasks_running', count=0)
-
     # Verify that custom query metrics ARE collected
     instance_tags = check._config.tags + [
         "database_hostname:{}".format("stubbed.hostname"),
@@ -537,8 +532,9 @@ def test_only_custom_queries(aggregator, dd_run_check, instance_docker):
         "dd.internal.resource:database_instance:{}".format("stubbed.hostname"),
         "sqlserver_servername:{}".format(check.static_info_cache.get(STATIC_INFO_SERVERNAME)),
     ]
-
     aggregator.assert_metric('sqlserver.custom_value', value=42, tags=instance_tags + ['test:only_custom'], count=1)
+    aggregator.assert_all_metrics_covered()
+


 @pytest.mark.integration
diff --git a/sqlserver/tests/test_unit.py b/sqlserver/tests/test_unit.py
index b318cd4d853f6..30dad8e026c66 100644
--- a/sqlserver/tests/test_unit.py
+++ b/sqlserver/tests/test_unit.py
@@ -983,7 +983,7 @@ def test_only_custom_queries_validation_warnings(caplog):

     # Check for stored procedure warning
     proc_warning_found = any(
-        "only_custom_queries is enabled with stored_procedure" in record.message for record in caplog.records
+        "`stored_procedure` is deprecated" in record.message for record in caplog.records
     )
     assert proc_warning_found, "Expected warning about only_custom_queries with stored_procedure not found"


From ec77aa32d6ecf65ac17d7a9ecbf1962bf2b30888 Mon Sep 17 00:00:00 2001
From: Eden Gorevoy
Date: Mon, 15 Sep 2025 10:09:52 -0400
Subject: [PATCH 5/5] Update warnings, fix integration test metric assertion

---
 sqlserver/tests/test_integration.py | 1 -
 sqlserver/tests/test_unit.py        | 8 ++------
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/sqlserver/tests/test_integration.py b/sqlserver/tests/test_integration.py
index 8411612c7b90b..67b26ada21fcd 100644
--- a/sqlserver/tests/test_integration.py
+++ b/sqlserver/tests/test_integration.py
@@ -536,7 +536,6 @@ def test_only_custom_queries(aggregator, dd_run_check, instance_docker):
     aggregator.assert_metric('sqlserver.custom_value', value=42, tags=instance_tags + ['test:only_custom'], count=1)
     aggregator.assert_all_metrics_covered()
-


 @pytest.mark.integration
 @pytest.mark.usefixtures('dd_environment')
 def test_load_static_information(aggregator, dd_run_check, instance_docker):
diff --git a/sqlserver/tests/test_unit.py b/sqlserver/tests/test_unit.py
index 30dad8e026c66..55b5cba8e2904 100644
--- a/sqlserver/tests/test_unit.py
+++ b/sqlserver/tests/test_unit.py
@@ -982,9 +982,7 @@ def test_only_custom_queries_validation_warnings(caplog):
     config._validate_only_custom_queries(instance_with_proc)

     # Check for stored procedure warning
-    proc_warning_found = any(
-        "`stored_procedure` is deprecated" in record.message for record in caplog.records
-    )
+    proc_warning_found = any("`stored_procedure` is deprecated" in record.message for record in caplog.records)
     assert proc_warning_found, "Expected warning about only_custom_queries with stored_procedure not found"

     # Test case 3: only_custom_queries with no custom queries defined
@@ -1024,9 +1022,7 @@ def test_only_custom_queries_validation_warnings(caplog):

     # Check that all three warnings are emitted
     dbm_warning_found = any("only_custom_queries is enabled with DBM" in record.message for record in caplog.records)
-    proc_warning_found = any(
-        "only_custom_queries is enabled with stored_procedure" in record.message for record in caplog.records
-    )
+    proc_warning_found = any("`stored_procedure` is deprecated" in record.message for record in caplog.records)
     no_queries_warning_found = any(
         "only_custom_queries is enabled but no custom queries are defined" in record.message
         for record in caplog.records