
Commit c36a70c

Enable only_custom_queries for sqlserver
1 parent aaa9182 commit c36a70c

File tree

3 files changed: 115 additions, 50 deletions

sqlserver/datadog_checks/sqlserver/config.py

Lines changed: 1 addition & 0 deletions

@@ -40,6 +40,7 @@ def __init__(self, init_config, instance, log):
 
         self.proc: str = instance.get('stored_procedure')
         self.custom_metrics: list[dict] = init_config.get('custom_metrics', []) or []
+        self.only_custom_queries: bool = is_affirmative(instance.get('only_custom_queries', False))
         self.ignore_missing_database = is_affirmative(instance.get("ignore_missing_database", False))
         if self.ignore_missing_database:
             self.log.warning(
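For reference, a minimal sketch of an instance configuration that exercises the new option, mirroring what the integration test below sets up. The connection keys (host, username, password) are illustrative placeholders and are not part of this commit; only only_custom_queries and custom_queries come from the changes shown here.

    instance = {
        'host': 'localhost,1433',   # placeholder connection settings, not from this commit
        'username': 'datadog',
        'password': '<PASSWORD>',
        # new option: skip default metric collection and run only the custom queries
        'only_custom_queries': True,
        'custom_queries': [
            {
                'query': "SELECT letter, num FROM (VALUES (97, 'a'), (98, 'b')) AS t (num,letter)",
                'columns': [{'name': 'customtag', 'type': 'tag'}, {'name': 'num', 'type': 'gauge'}],
                'tags': ['query:custom'],
            }
        ],
    }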

sqlserver/datadog_checks/sqlserver/sqlserver.py

Lines changed: 61 additions & 50 deletions
@@ -821,6 +821,7 @@ def _check_database_conns(self):
         self._check_connections_by_use_db()
 
     def check(self, _):
+        self.log.error("[EG] blooob")
         if self.do_check:
             self.load_static_information()
             # configure custom queries for the check
@@ -830,11 +831,15 @@ def check(self, _):
                 self, self.execute_query_raw, tags=self.tag_manager.get_tags(), hostname=self.reported_hostname
             )
             self._query_manager.compile_queries()
+
             self._send_database_instance_metadata()
+
             if self._config.proc:
                 self.do_stored_procedure_check()
             else:
                 self.collect_metrics()
+
+            self.log.error("[EG] I am right here!!!! blub")
             if self._config.autodiscovery and self._config.autodiscovery_db_service_check:
                 self._check_database_conns()
             if self._config.dbm_enabled:
@@ -852,6 +857,7 @@ def check(self, _):
                        handler.run_job_loop(self.tag_manager.get_tags())
                    except Exception as e:
                        self.log.error("Error running XE session handler for %s: %s", handler.session_name, e)
+
         else:
             self.log.debug("Skipping check")

@@ -926,52 +932,56 @@ def log_missing_metric(self, metric_name, major_version, engine_version):
         else:
             self.log.warning("%s metrics are not supported on Azure engine version: %s", metric_name, engine_version)
 
-    def collect_metrics(self):
-        """Fetch the metrics from all the associated database tables."""
+    # queries for default integration metrics from the database
+    def load_basic_metrics(self, cursor):
+        # initiate autodiscovery or if the server was down at check __init__ key could be missing.
+        if self.autodiscover_databases(cursor) or not self.instance_metrics:
+            self._make_metric_list_to_collect(self._config.custom_metrics)
+
+        instance_results = {}
+        engine_edition = self.static_info_cache.get(STATIC_INFO_ENGINE_EDITION, "")
+        # Execute the `fetch_all` operations first to minimize the database calls
+        for cls, metric_names in self.instance_per_type_metrics.items():
+            if not metric_names:
+                instance_results[cls] = None, None
+            else:
+                try:
+                    db_names = [d.name for d in self.databases] or [
+                        self.instance.get("database", self.connection.DEFAULT_DATABASE)
+                    ]
+                    metric_cls = getattr(metrics, cls)
+                    with tracked_query(self, operation=metric_cls.OPERATION_NAME):
+                        rows, cols = metric_cls.fetch_all_values(
+                            cursor,
+                            list(metric_names),
+                            self.log,
+                            databases=db_names,
+                            engine_edition=engine_edition,
+                        )
+                except Exception as e:
+                    self.log.error("Error running `fetch_all` for metrics %s - skipping. Error: %s", cls, e)
+                    rows, cols = None, None
 
-        with self.connection.open_managed_default_connection():
-            with self.connection.get_managed_cursor() as cursor:
-                # initiate autodiscovery or if the server was down at check __init__ key could be missing.
-                if self.autodiscover_databases(cursor) or not self.instance_metrics:
-                    self._make_metric_list_to_collect(self._config.custom_metrics)
+                instance_results[cls] = rows, cols
 
-                instance_results = {}
-                engine_edition = self.static_info_cache.get(STATIC_INFO_ENGINE_EDITION, "")
-                # Execute the `fetch_all` operations first to minimize the database calls
-                for cls, metric_names in self.instance_per_type_metrics.items():
-                    if not metric_names:
-                        instance_results[cls] = None, None
+        for metric in self.instance_metrics:
+            key = metric.__class__.__name__
+            if key not in instance_results:
+                self.log.warning("No %s metrics found, skipping", str(key))
+            else:
+                rows, cols = instance_results[key]
+                if rows is not None:
+                    if key == "SqlIncrFractionMetric":
+                        metric.fetch_metric(rows, cols, self.sqlserver_incr_fraction_metric_previous_values)
                     else:
-                        try:
-                            db_names = [d.name for d in self.databases] or [
-                                self.instance.get("database", self.connection.DEFAULT_DATABASE)
-                            ]
-                            metric_cls = getattr(metrics, cls)
-                            with tracked_query(self, operation=metric_cls.OPERATION_NAME):
-                                rows, cols = metric_cls.fetch_all_values(
-                                    cursor,
-                                    list(metric_names),
-                                    self.log,
-                                    databases=db_names,
-                                    engine_edition=engine_edition,
-                                )
-                        except Exception as e:
-                            self.log.error("Error running `fetch_all` for metrics %s - skipping. Error: %s", cls, e)
-                            rows, cols = None, None
-
-                        instance_results[cls] = rows, cols
+                        metric.fetch_metric(rows, cols)
 
-                for metric in self.instance_metrics:
-                    key = metric.__class__.__name__
-                    if key not in instance_results:
-                        self.log.warning("No %s metrics found, skipping", str(key))
-                    else:
-                        rows, cols = instance_results[key]
-                        if rows is not None:
-                            if key == "SqlIncrFractionMetric":
-                                metric.fetch_metric(rows, cols, self.sqlserver_incr_fraction_metric_previous_values)
-                            else:
-                                metric.fetch_metric(rows, cols)
+    def collect_metrics(self):
+        """Fetch the metrics from all the associated database tables."""
+        with self.connection.open_managed_default_connection():
+            if not self._config.only_custom_queries:
+                with self.connection.get_managed_cursor() as cursor:
+                    self.load_basic_metrics(cursor)
 
             # Neither pyodbc nor adodbapi are able to read results of a query if the number of rows affected
             # statement are returned as part of the result set, so we disable for the entire connection
@@ -980,14 +990,15 @@ def collect_metrics(self):
             with self.connection.get_managed_cursor() as cursor:
                 cursor.execute("SET NOCOUNT ON")
             try:
-                # restore the current database after executing dynamic queries
-                # this is to ensure the current database context is not changed
-                with self.connection.restore_current_database_context():
-                    if self.database_metrics:
-                        for database_metric in self.database_metrics:
-                            database_metric.execute()
-
-                # reuse connection for any custom queries
+                if not self._config.only_custom_queries:
+                    # restore the current database after executing dynamic queries
+                    # this is to ensure the current database context is not changed
+                    with self.connection.restore_current_database_context():
+                        if self.database_metrics:
+                            for database_metric in self.database_metrics:
+                                database_metric.execute()
+
+                # reuse the connection for custom queries
                 self._query_manager.execute()
             finally:
                 with self.connection.get_managed_cursor() as cursor:
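Taken together, the new flow is: when only_custom_queries is enabled, both the default instance metrics (load_basic_metrics) and the per-database metrics are skipped, while self._query_manager.execute() still runs the configured custom queries. A rough reconstruction of collect_metrics after this commit, assembled only from the hunks above, is sketched below; lines outside the diff context are elided, and the body of the final finally block is an assumption, not shown in the diff.

    def collect_metrics(self):
        """Fetch the metrics from all the associated database tables."""
        with self.connection.open_managed_default_connection():
            # default instance metrics are skipped entirely when only_custom_queries is set
            if not self._config.only_custom_queries:
                with self.connection.get_managed_cursor() as cursor:
                    self.load_basic_metrics(cursor)

            with self.connection.get_managed_cursor() as cursor:
                cursor.execute("SET NOCOUNT ON")
            try:
                # per-database metrics are gated behind the same flag
                if not self._config.only_custom_queries:
                    with self.connection.restore_current_database_context():
                        if self.database_metrics:
                            for database_metric in self.database_metrics:
                                database_metric.execute()

                # custom queries always run, reusing the same connection
                self._query_manager.execute()
            finally:
                with self.connection.get_managed_cursor() as cursor:
                    cursor.execute("SET NOCOUNT OFF")  # assumed: this finally body is not shown in the diff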

sqlserver/tests/test_integration.py

Lines changed: 53 additions & 0 deletions
@@ -506,6 +506,59 @@ def test_custom_queries(aggregator, dd_run_check, instance_docker, custom_query,
         aggregator.assert_metric(metric_name, **kwargs)
 
 
+@pytest.mark.integration
+@pytest.mark.usefixtures('dd_environment')
+@pytest.mark.parametrize(
+    "custom_query, assert_metrics",
+    [
+        (
+            {
+                'query': "SELECT letter, num FROM (VALUES (97, 'a'), (98, 'b')) AS t (num,letter)",
+                'columns': [{'name': 'customtag', 'type': 'tag'}, {'name': 'num', 'type': 'gauge'}],
+                'tags': ['query:custom'],
+            },
+            [
+                ("sqlserver.num", {"value": 97, "tags": ["customtag:a", "query:custom"]}),
+                ("sqlserver.num", {"value": 98, "tags": ["customtag:b", "query:custom"]}),
+            ],
+        ),
+        (
+            {
+                'query': "EXEC exampleProcWithoutNocount",
+                'columns': [{'name': 'value', 'type': 'gauge'}],
+                'tags': ['hello:there'],
+            },
+            [
+                ("sqlserver.value", {"value": 1, "tags": ["hello:there"]}),
+            ],
+        ),
+    ],
+)
+def test_custom_queries_only(aggregator, dd_run_check, instance_docker, custom_query, assert_metrics):
+    instance = copy(instance_docker)
+    instance['custom_queries'] = [custom_query]
+    instance['only_custom_queries'] = True
+    instance['procedure_metrics'] = {'enabled': False}
+
+    check = SQLServer(CHECK_NAME, {}, [instance])
+    dd_run_check(check)
+
+    for metric_name, kwargs in assert_metrics:
+        kwargs = copy(kwargs)
+        kwargs['tags'] = (
+            check._config.tags
+            + [
+                "database_hostname:{}".format("stubbed.hostname"),
+                "database_instance:{}".format("stubbed.hostname"),
+                "ddagenthostname:{}".format("stubbed.hostname"),
+                "dd.internal.resource:database_instance:{}".format("stubbed.hostname"),
+                "sqlserver_servername:{}".format(check.static_info_cache.get(STATIC_INFO_SERVERNAME)),
+            ]
+            + kwargs.get('tags', [])
+        )
+        aggregator.assert_metric(metric_name, **kwargs)
+
+
 @pytest.mark.integration
 @pytest.mark.usefixtures('dd_environment')
 def test_load_static_information(aggregator, dd_run_check, instance_docker):
