From e3f2de18ca7e91fc458327e079f8f101046fc659 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 10 Oct 2025 12:57:35 -0700 Subject: [PATCH 1/5] Reorder inheritance on span nodes (#1539) * Reorder inheritance on span nodes Core tracing will be adding a bunch of logic to the span_event method which is called via super on inheriting classes before those inheriting classes add additional attributes. In order for the core tracing logic to work correctly, this inheritance call order has to be reversed; the inheriting classes must first add the attributes and THEN call super. * Remove *args, **kwargs * [MegaLinter] Apply linters fixes --------- Co-authored-by: hmstepanek <30059933+hmstepanek@users.noreply.github.com> --- newrelic/core/external_node.py | 11 +++++------ newrelic/core/function_node.py | 8 +++----- newrelic/core/loop_node.py | 8 +++----- newrelic/core/node_mixin.py | 25 ++++++++++++------------- newrelic/core/root_node.py | 8 ++++---- 5 files changed, 27 insertions(+), 33 deletions(-) diff --git a/newrelic/core/external_node.py b/newrelic/core/external_node.py index 9165d2081f..bd0fde04f6 100644 --- a/newrelic/core/external_node.py +++ b/newrelic/core/external_node.py @@ -169,16 +169,15 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, *args, **kwargs): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): self.agent_attributes["http.url"] = self.http_url - attrs = super().span_event(*args, **kwargs) - i_attrs = attrs[0] + i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["category"] = "http" i_attrs["span.kind"] = "client" - _, i_attrs["component"] = attribute.process_user_attribute("component", self.library) + i_attrs["component"] = self.library if self.method: - _, i_attrs["http.method"] = attribute.process_user_attribute("http.method", self.method) + i_attrs["http.method"] = 
self.method - return attrs + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) diff --git a/newrelic/core/function_node.py b/newrelic/core/function_node.py index 809f26742c..588f675f31 100644 --- a/newrelic/core/function_node.py +++ b/newrelic/core/function_node.py @@ -114,10 +114,8 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=self.label ) - def span_event(self, *args, **kwargs): - attrs = super().span_event(*args, **kwargs) - i_attrs = attrs[0] - + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"{self.group}/{self.name}" - return attrs + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) diff --git a/newrelic/core/loop_node.py b/newrelic/core/loop_node.py index b9328e7013..58d1b3a746 100644 --- a/newrelic/core/loop_node.py +++ b/newrelic/core/loop_node.py @@ -79,10 +79,8 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, *args, **kwargs): - attrs = super().span_event(*args, **kwargs) - i_attrs = attrs[0] - + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"EventLoop/Wait/{self.name}" - return attrs + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) diff --git a/newrelic/core/node_mixin.py b/newrelic/core/node_mixin.py index 8eedd191d4..699a2d1118 100644 --- a/newrelic/core/node_mixin.py +++ b/newrelic/core/node_mixin.py @@ -52,11 +52,11 @@ def get_trace_segment_params(self, settings, params=None): def span_event(self, settings, base_attrs=None, 
parent_guid=None, attr_class=dict): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["type"] = "Span" - i_attrs["name"] = self.name + i_attrs["name"] = i_attrs.get("name") or self.name i_attrs["guid"] = self.guid i_attrs["timestamp"] = int(self.start_time * 1000) i_attrs["duration"] = self.duration - i_attrs["category"] = "generic" + i_attrs["category"] = i_attrs.get("category") or "generic" if parent_guid: i_attrs["parentId"] = parent_guid @@ -108,24 +108,23 @@ def db_instance(self): self._db_instance = db_instance_attr return db_instance_attr - def span_event(self, *args, **kwargs): - self.agent_attributes["db.instance"] = self.db_instance - attrs = super().span_event(*args, **kwargs) - i_attrs = attrs[0] - a_attrs = attrs[2] + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + a_attrs = self.agent_attributes + a_attrs["db.instance"] = self.db_instance + i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["category"] = "datastore" i_attrs["span.kind"] = "client" if self.product: - i_attrs["component"] = a_attrs["db.system"] = attribute.process_user_attribute("db.system", self.product)[1] + i_attrs["component"] = a_attrs["db.system"] = self.product if self.operation: - a_attrs["db.operation"] = attribute.process_user_attribute("db.operation", self.operation)[1] + a_attrs["db.operation"] = self.operation if self.target: - a_attrs["db.collection"] = attribute.process_user_attribute("db.collection", self.target)[1] + a_attrs["db.collection"] = self.target if self.instance_hostname: - peer_hostname = attribute.process_user_attribute("peer.hostname", self.instance_hostname)[1] + peer_hostname = self.instance_hostname else: peer_hostname = "Unknown" @@ -133,7 +132,7 @@ def span_event(self, *args, **kwargs): peer_address = f"{peer_hostname}:{self.port_path_or_id or 'Unknown'}" - a_attrs["peer.address"] = attribute.process_user_attribute("peer.address", peer_address)[1] + a_attrs["peer.address"] = 
peer_address # Attempt to treat port_path_or_id as an integer, fallback to not including it try: @@ -141,4 +140,4 @@ def span_event(self, *args, **kwargs): except Exception: pass - return attrs + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) diff --git a/newrelic/core/root_node.py b/newrelic/core/root_node.py index 1591afa3ad..fa8b3de82b 100644 --- a/newrelic/core/root_node.py +++ b/newrelic/core/root_node.py @@ -37,16 +37,16 @@ class RootNode(_RootNode, GenericNodeMixin): - def span_event(self, *args, **kwargs): - span = super().span_event(*args, **kwargs) - i_attrs = span[0] + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["transaction.name"] = self.path i_attrs["nr.entryPoint"] = True if self.trusted_parent_span: i_attrs["trustedParentId"] = self.trusted_parent_span if self.tracing_vendors: i_attrs["tracingVendors"] = self.tracing_vendors - return span + + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) def trace_node(self, stats, root, connections): name = self.path From 64e58b4aebff7795615a3770ae2d0847a7a94c20 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 6 Jun 2025 10:33:53 -0700 Subject: [PATCH 2/5] Add support 4 partial granularity tracing --- newrelic/api/transaction.py | 114 ++++++++++++++++++++++-------- newrelic/config.py | 8 +++ newrelic/core/agent_protocol.py | 1 + newrelic/core/attribute.py | 17 +++++ newrelic/core/config.py | 45 ++++++++++++ newrelic/core/data_collector.py | 6 +- newrelic/core/database_node.py | 4 +- newrelic/core/external_node.py | 4 +- newrelic/core/function_node.py | 4 +- newrelic/core/loop_node.py | 4 +- newrelic/core/node_mixin.py | 72 +++++++++++++++---- newrelic/core/root_node.py | 4 +- newrelic/core/transaction_node.py | 5 +- 13 files changed, 232 insertions(+), 56 deletions(-) diff --git 
a/newrelic/api/transaction.py b/newrelic/api/transaction.py index b163ff54fd..70206004e2 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -285,7 +285,7 @@ def __init__(self, application, enabled=None, source=None): self.tracestate = "" self._priority = None self._sampled = None - self._traceparent_sampled = None + self._remote_parent_sampled = None self._distributed_trace_state = 0 @@ -569,7 +569,7 @@ def __exit__(self, exc, value, tb): if self._settings.distributed_tracing.enabled: # Sampled and priority need to be computed at the end of the # transaction when distributed tracing or span events are enabled. - self._compute_sampled_and_priority() + self._make_sampling_decision() self._cached_path._name = self.path agent_attributes = self.agent_attributes @@ -636,6 +636,7 @@ def __exit__(self, exc, value, tb): trace_id=self.trace_id, loop_time=self._loop_time, root=root_node, + partial_granularity_sampled=hasattr(self, "partial_granularity_sampled"), ) # Clear settings as we are all done and don't need it @@ -1004,35 +1005,87 @@ def _update_agent_attributes(self): def user_attributes(self): return create_attributes(self._custom_params, DST_ALL, self.attribute_filter) - def sampling_algo_compute_sampled_and_priority(self): - if self._priority is None: + def sampling_algo_compute_sampled_and_priority(self, priority, sampled): + # self._priority and self._sampled are set when parsing the W3C tracestate + # or newrelic DT headers and may be overridden in _make_sampling_decision + # based on the configuration. The only time they are set in here is when the + # sampling decision must be made by the adaptive sampling algorithm. + if priority is None: # Truncate priority field to 6 digits past the decimal. 
- self._priority = float(f"{random.random():.6f}") # noqa: S311 - if self._sampled is None: - self._sampled = self._application.compute_sampled() - if self._sampled: - self._priority += 1 - - def _compute_sampled_and_priority(self): - if self._traceparent_sampled is None: + priority = float(f"{random.random():.6f}") # noqa: S311 + if sampled is None: + _logger.debug("No trusted account id found. Sampling decision will be made by adaptive sampling algorithm.") + sampled = self._application.compute_sampled() + if sampled: + priority += 1 + return priority, sampled + + def _compute_sampled_and_priority(self, priority, sampled, remote_parent_sampled_path, remote_parent_sampled_setting, remote_parent_not_sampled_path, remote_parent_not_sampled_setting): + if self._remote_parent_sampled is None: config = "default" # Use sampling algo. - elif self._traceparent_sampled: - setting_path = "distributed_tracing.sampler.remote_parent_sampled" - config = self.settings.distributed_tracing.sampler.remote_parent_sampled - else: # self._traceparent_sampled is False. - setting_path = "distributed_tracing.sampler.remote_parent_not_sampled" - config = self.settings.distributed_tracing.sampler.remote_parent_not_sampled - + _logger.debug("Sampling decision made based on no remote parent sampling decision present.") + elif self._remote_parent_sampled: + setting_path = remote_parent_sampled_path + config = remote_parent_sampled_setting + _logger.debug("Sampling decision made based on remote_parent_sampled=%s and %s=%s.", self._remote_parent_sampled, setting_path, config) + else: # self._remote_parent_sampled is False. 
+ setting_path = remote_parent_not_sampled_path + config = remote_parent_not_sampled_setting + _logger.debug("Sampling decision made based on remote_parent_sampled=%s and %s=%s.", self._remote_parent_sampled, setting_path, config) if config == "always_on": - self._sampled = True - self._priority = 2.0 + sampled = True + priority = 2.0 elif config == "always_off": - self._sampled = False - self._priority = 0 + sampled = False + priority = 0 else: - if config != "default": + if config not in ("default", "adaptive"): _logger.warning("%s=%s is not a recognized value. Using 'default' instead.", setting_path, config) - self.sampling_algo_compute_sampled_and_priority() + + _logger.debug("Let adaptive sampler algorithm decide based on sampled=%s and priority=%s.", sampled, priority) + priority, sampled = self.sampling_algo_compute_sampled_and_priority(priority, sampled) + return priority, sampled + + def _make_sampling_decision(self): + # The sampling decision is computed each time a DT header is generated for exit spans as it is needed + # to send the DT headers. Don't recompute the sampling decision multiple times as it is expensive. + if hasattr(self, "_sampling_decision_made"): + return + priority = self._priority + sampled = self._sampled + # Compute sampling decision for full granularity. + if self.settings.distributed_tracing.sampler.full_granularity.enabled: + _logger.debug("Full granularity tracing is enabled. Asking if full granularity wants to sample. 
priority=%s, sampled=%s", priority, sampled) + computed_priority, computed_sampled = self._compute_sampled_and_priority( + priority, + sampled, + remote_parent_sampled_path = "distributed_tracing.sampler.full_granularity.remote_parent_sampled", + remote_parent_sampled_setting = self.settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled, + remote_parent_not_sampled_path = "distributed_tracing.sampler.full_granularity.remote_parent_not_sampled", + remote_parent_not_sampled_setting = self.settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled, + ) + _logger.debug("Full granularity sampling decision was %s with priority=%s.", sampled, priority) + if computed_sampled: + self._priority = computed_priority + self._sampled = computed_sampled + self._sampling_decision_made = True + return + + # If full granularity is not going to sample, let partial granularity decide. + if self.settings.distributed_tracing.sampler.partial_granularity.enabled: + _logger.debug("Partial granularity tracing is enabled. 
Asking if partial granularity wants to sample.") + self._priority, self._sampled = self._compute_sampled_and_priority( + priority, + sampled, + remote_parent_sampled_path = "distributed_tracing.sampler.partial_granularity.remote_parent_sampled", + remote_parent_sampled_setting = self.settings.distributed_tracing.sampler.partial_granularity.remote_parent_sampled, + remote_parent_not_sampled_path = "distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled", + remote_parent_not_sampled_setting = self.settings.distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled, + ) + _logger.debug("Partial granularity sampling decision was %s with priority=%s.", self._sampled, self._priority) + self._sampling_decision_made = True + if self._sampled: + self.partial_granularity_sampled = True def _freeze_path(self): if self._frozen_path is None: @@ -1101,7 +1154,7 @@ def _create_distributed_trace_data(self): if not (account_id and application_id and trusted_account_key and settings.distributed_tracing.enabled): return - self._compute_sampled_and_priority() + self._make_sampling_decision() data = { "ty": "App", "ac": account_id, @@ -1184,6 +1237,7 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): return False try: + self._remote_parent_sampled = payload.get("sa") version = payload.get("v") major_version = version and int(version[0]) @@ -1254,10 +1308,8 @@ def _accept_distributed_trace_data(self, data, transport_type): self._trace_id = data.get("tr") - priority = data.get("pr") - if priority is not None: - self._priority = priority - self._sampled = data.get("sa") + self._priority = data.get("pr") + self._sampled = data.get("sa") if "ti" in data: transport_start = data["ti"] / 1000.0 @@ -1297,6 +1349,7 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): try: traceparent = ensure_str(traceparent).strip() data = W3CTraceParent.decode(traceparent) + self._remote_parent_sampled = data.get("sa") 
except: data = None @@ -1332,7 +1385,6 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): else: self._record_supportability("Supportability/TraceContext/TraceState/NoNrEntry") - self._traceparent_sampled = data.get("sa") self._accept_distributed_trace_data(data, transport_type) self._record_supportability("Supportability/TraceContext/Accept/Success") return True diff --git a/newrelic/config.py b/newrelic/config.py index cb879d9c4b..3d388af171 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -404,8 +404,16 @@ def _process_configuration(section): _process_setting(section, "ml_insights_events.enabled", "getboolean", None) _process_setting(section, "distributed_tracing.enabled", "getboolean", None) _process_setting(section, "distributed_tracing.exclude_newrelic_header", "getboolean", None) + _process_setting(section, "distributed_tracing.sampler.adaptive_sampling_target", "getint", None) _process_setting(section, "distributed_tracing.sampler.remote_parent_sampled", "get", None) _process_setting(section, "distributed_tracing.sampler.remote_parent_not_sampled", "get", None) + _process_setting(section, "distributed_tracing.sampler.full_granularity.enabled", "getboolean", None) + _process_setting(section, "distributed_tracing.sampler.full_granularity.remote_parent_sampled", "get", None) + _process_setting(section, "distributed_tracing.sampler.full_granularity.remote_parent_not_sampled", "get", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.enabled", "getboolean", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.type", "get", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_sampled", "get", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled", "get", None) _process_setting(section, "span_events.enabled", "getboolean", None) _process_setting(section, 
"span_events.max_samples_stored", "getint", None) _process_setting(section, "span_events.attributes.enabled", "getboolean", None) diff --git a/newrelic/core/agent_protocol.py b/newrelic/core/agent_protocol.py index 0657adc547..705bead3c9 100644 --- a/newrelic/core/agent_protocol.py +++ b/newrelic/core/agent_protocol.py @@ -297,6 +297,7 @@ def _connect_payload(app_name, linked_applications, environment, settings): connect_settings["browser_monitoring.loader"] = settings["browser_monitoring.loader"] connect_settings["browser_monitoring.debug"] = settings["browser_monitoring.debug"] connect_settings["ai_monitoring.enabled"] = settings["ai_monitoring.enabled"] + connect_settings["distributed_tracing.sampler.adaptive_sampling_target"] = settings["distributed_tracing.sampler.adaptive_sampling_target"] security_settings = {} security_settings["capture_params"] = settings["capture_params"] diff --git a/newrelic/core/attribute.py b/newrelic/core/attribute.py index 79b9a56cb2..afdcd95d29 100644 --- a/newrelic/core/attribute.py +++ b/newrelic/core/attribute.py @@ -108,6 +108,23 @@ "zeebe.client.resourceFile", } +SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES = { + "cloud.account.id", + "cloud.platform", + "cloud.region", + "cloud.resource_id", + "db.instance", + "db.system", + "http.url", + "messaging.destination.name", + "messaging.system", + "peer.hostname", + "server.address", + "server.port", + "span.kind", +} + + MAX_NUM_USER_ATTRIBUTES = 128 MAX_ATTRIBUTE_LENGTH = 255 MAX_NUM_ML_USER_ATTRIBUTES = 64 diff --git a/newrelic/core/config.py b/newrelic/core/config.py index e7573a1fec..6024f9ee79 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -337,6 +337,14 @@ class DistributedTracingSamplerSettings(Settings): pass +class DistributedTracingSamplerFullGranularitySettings(Settings): + pass + + +class DistributedTracingSamplerPartialGranularitySettings(Settings): + pass + + class ServerlessModeSettings(Settings): pass @@ -507,6 +515,8 @@ class 
EventHarvestConfigHarvestLimitSettings(Settings): _settings.debug = DebugSettings() _settings.distributed_tracing = DistributedTracingSettings() _settings.distributed_tracing.sampler = DistributedTracingSamplerSettings() +_settings.distributed_tracing.sampler.full_granularity = DistributedTracingSamplerFullGranularitySettings() +_settings.distributed_tracing.sampler.partial_granularity = DistributedTracingSamplerPartialGranularitySettings() _settings.error_collector = ErrorCollectorSettings() _settings.error_collector.attributes = ErrorCollectorAttributesSettings() _settings.event_harvest_config = EventHarvestConfigSettings() @@ -837,12 +847,32 @@ def default_otlp_host(host): _settings.ml_insights_events.enabled = False _settings.distributed_tracing.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_ENABLED", default=True) +_settings.distributed_tracing.sampler.adaptive_sampling_target = _environ_as_int( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_ADAPTIVE_SAMPLING_TARGET", default=10 +) _settings.distributed_tracing.sampler.remote_parent_sampled = os.environ.get( "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED", "default" ) _settings.distributed_tracing.sampler.remote_parent_not_sampled = os.environ.get( "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_NOT_SAMPLED", "default" ) +_settings.distributed_tracing.sampler.full_granularity.enabled = _environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_ENABLED", default=True) +_settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_SAMPLED", None +) +_settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED", None +) +_settings.distributed_tracing.sampler.partial_granularity.enabled = 
_environ_as_bool("NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_ENABLED", default=False) +_settings.distributed_tracing.sampler.partial_granularity.type = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_TYPE", "essential" +) +_settings.distributed_tracing.sampler.partial_granularity.remote_parent_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_SAMPLED", "default" +) +_settings.distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED", "default" +) _settings.distributed_tracing.exclude_newrelic_header = False _settings.span_events.enabled = _environ_as_bool("NEW_RELIC_SPAN_EVENTS_ENABLED", default=True) _settings.event_harvest_config.harvest_limits.span_event_data = _environ_as_int( @@ -1369,9 +1399,24 @@ def finalize_application_settings(server_side_config=None, settings=_settings): application_settings.attribute_filter = AttributeFilter(flatten_settings(application_settings)) + simplify_distributed_tracing_sampler_granularity_settings(application_settings) + return application_settings +def simplify_distributed_tracing_sampler_granularity_settings(settings): + # Full granularity settings may appear under: + # * `distributed_tracing.sampler` + # * `distributed_tracing.sampler.full_granularity` + # The `distributed_tracing.sampler.full_granularity` path takes precedence. + # To simplify logic in the code that uses these settings, store the values that + # should be used at the `distributed_tracing.sampler.full_granularity` path. 
+ if not settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled: + settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled = settings.distributed_tracing.sampler.remote_parent_sampled + if not settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled: + settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled = settings.distributed_tracing.sampler.remote_parent_not_sampled + + def _remove_ignored_configs(server_settings): if not server_settings.get("agent_config"): return server_settings diff --git a/newrelic/core/data_collector.py b/newrelic/core/data_collector.py index e481f1d6e7..e0187b088d 100644 --- a/newrelic/core/data_collector.py +++ b/newrelic/core/data_collector.py @@ -117,7 +117,11 @@ def send_ml_events(self, sampling_info, custom_event_data): def send_span_events(self, sampling_info, span_event_data): """Called to submit sample set for span events.""" - + # TODO: remove this later after list types are supported. 
+ for span_event in span_event_data: + ids = span_event[1].get("nr.ids") + if ids: + span_event[1]["nr.ids"] = ",".join(ids) payload = (self.agent_run_id, sampling_info, span_event_data) return self._protocol.send("span_event_data", payload) diff --git a/newrelic/core/database_node.py b/newrelic/core/database_node.py index 7c4032c5b9..64e1e4b2ae 100644 --- a/newrelic/core/database_node.py +++ b/newrelic/core/database_node.py @@ -279,7 +279,7 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, *args, **kwargs): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): sql = self.formatted if sql: @@ -288,4 +288,4 @@ def span_event(self, *args, **kwargs): self.agent_attributes["db.statement"] = sql - return super().span_event(*args, **kwargs) + return super().span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) diff --git a/newrelic/core/external_node.py b/newrelic/core/external_node.py index bd0fde04f6..ce0f33086a 100644 --- a/newrelic/core/external_node.py +++ b/newrelic/core/external_node.py @@ -169,7 +169,7 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): self.agent_attributes["http.url"] = self.http_url i_attrs = (base_attrs and base_attrs.copy()) or attr_class() @@ -180,4 +180,4 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic if self.method: i_attrs["http.method"] = self.method - 
return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) diff --git a/newrelic/core/function_node.py b/newrelic/core/function_node.py index 588f675f31..dcbf038a23 100644 --- a/newrelic/core/function_node.py +++ b/newrelic/core/function_node.py @@ -114,8 +114,8 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=self.label ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"{self.group}/{self.name}" - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) diff --git a/newrelic/core/loop_node.py b/newrelic/core/loop_node.py index 58d1b3a746..4d400c40d9 100644 --- a/newrelic/core/loop_node.py +++ b/newrelic/core/loop_node.py @@ -79,8 +79,8 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"EventLoop/Wait/{self.name}" - return super().span_event(settings, 
base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) diff --git a/newrelic/core/node_mixin.py b/newrelic/core/node_mixin.py index 699a2d1118..95bb72667c 100644 --- a/newrelic/core/node_mixin.py +++ b/newrelic/core/node_mixin.py @@ -49,7 +49,7 @@ def get_trace_segment_params(self, settings, params=None): _params["exclusive_duration_millis"] = 1000.0 * self.exclusive return _params - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["type"] = "Span" i_attrs["name"] = i_attrs.get("name") or self.name @@ -68,18 +68,66 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic u_attrs = attribute.resolve_user_attributes( self.processed_user_attributes, settings.attribute_filter, DST_SPAN_EVENTS, attr_class=attr_class ) - - # intrinsics, user attrs, agent attrs - return [i_attrs, u_attrs, a_attrs] - - def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): - yield self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class) - + if not partial_granularity_sampled: + # intrinsics, user attrs, agent attrs + return [i_attrs, u_attrs, a_attrs] + else: + if ct_exit_spans is None: + ct_exit_spans = {} + + partial_granularity_type = settings.distributed_tracing.sampler.partial_granularity.type + exit_span_attrs_present = attribute.SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES & set(a_attrs) + # If this is the entry node or an LLM span always return it. 
+ if i_attrs.get("nr.entryPoint") or i_attrs["name"].startswith("Llm/"): + if partial_granularity_type == "reduced": + return [i_attrs, u_attrs, a_attrs] + else: + return [i_attrs, {}, {}] + # If the span is not an exit span, skip it by returning None. + if not exit_span_attrs_present: + return None + # If the span is an exit span and we are in reduced mode (meaning no attribute dropping), + # just return the exit span as is. + if partial_granularity_type == "reduced": + return [i_attrs, u_attrs, a_attrs] + else: + a_minimized_attrs = attr_class({key: a_attrs[key] for key in exit_span_attrs_present}) + # If we are in essential mode return the span with minimized attributes. + if partial_granularity_type == "essential": + return [i_attrs, {}, a_minimized_attrs] + # If the span is an exit span but span compression (compact) is enabled, we need to check + # for uniqueness before returning it. + # Combine all the entity relationship attr values into a string to be + # used as the hash to check for uniqueness. + span_attrs = "".join([str(a_minimized_attrs[key]) for key in exit_span_attrs_present]) + new_exit_span = span_attrs not in ct_exit_spans + # If this is a new exit span, add it to the known ct_exit_spans and return it. + if new_exit_span: + # ids is the list of span guids that share this unique exit span. + a_minimized_attrs["nr.ids"] = [] + a_minimized_attrs["nr.durations"] = self.duration + ct_exit_spans[span_attrs] = [a_minimized_attrs] + return [i_attrs, {}, a_minimized_attrs] + # If this is an exit span we've already seen, add its guid to the list + # of ids on the seen span and return None. 
+ ct_exit_spans[span_attrs][0]["nr.ids"].append(self.guid) + ct_exit_spans[span_attrs][0]["nr.durations"] += self.duration + return None + + def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): + span = self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) + parent_id = parent_guid + if span: # span will be None if the span is an inprocess span or repeated exit span. + yield span + # Compressed spans are always reparented onto the entry span. + if not settings.distributed_tracing.sampler.partial_granularity.type == "compact" or span[0].get("nr.entryPoint"): + parent_id = self.guid for child in self.children: for event in child.span_events( # noqa: UP028 - settings, base_attrs=base_attrs, parent_guid=self.guid, attr_class=attr_class + settings, base_attrs=base_attrs, parent_guid=parent_id, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans ): - yield event + if event: # event will be None if the span is an inprocess span or repeated exit span. 
+ yield event class DatastoreNodeMixin(GenericNodeMixin): @@ -108,7 +156,7 @@ def db_instance(self): self._db_instance = db_instance_attr return db_instance_attr - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): a_attrs = self.agent_attributes a_attrs["db.instance"] = self.db_instance i_attrs = (base_attrs and base_attrs.copy()) or attr_class() @@ -140,4 +188,4 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic except Exception: pass - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, ct_exit_spans=ct_exit_spans) diff --git a/newrelic/core/root_node.py b/newrelic/core/root_node.py index fa8b3de82b..7de8b1ead5 100644 --- a/newrelic/core/root_node.py +++ b/newrelic/core/root_node.py @@ -37,7 +37,7 @@ class RootNode(_RootNode, GenericNodeMixin): - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["transaction.name"] = self.path i_attrs["nr.entryPoint"] = True @@ -46,7 +46,7 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic if self.tracing_vendors: i_attrs["tracingVendors"] = self.tracing_vendors - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class, partial_granularity_sampled=partial_granularity_sampled, 
ct_exit_spans=ct_exit_spans) def trace_node(self, stats, root, connections): name = self.path diff --git a/newrelic/core/transaction_node.py b/newrelic/core/transaction_node.py index 34871d8b21..060833ec07 100644 --- a/newrelic/core/transaction_node.py +++ b/newrelic/core/transaction_node.py @@ -98,6 +98,7 @@ "root_span_guid", "trace_id", "loop_time", + "partial_granularity_sampled", ], ) @@ -633,5 +634,5 @@ def span_events(self, settings, attr_class=dict): ("priority", self.priority), ) ) - - yield from self.root.span_events(settings, base_attrs, parent_guid=self.parent_span, attr_class=attr_class) + ct_exit_spans = {} + yield from self.root.span_events(settings, base_attrs, parent_guid=self.parent_span, attr_class=attr_class, partial_granularity_sampled = self.partial_granularity_sampled, ct_exit_spans=ct_exit_spans) From 27573b4562f4aae70ccca4065556812664473f6f Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 10 Oct 2025 15:26:27 -0700 Subject: [PATCH 3/5] Add tests --- .../test_distributed_tracing.py | 48 ++++++++++++------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/tests/agent_features/test_distributed_tracing.py b/tests/agent_features/test_distributed_tracing.py index 36261d97e2..6d244c192b 100644 --- a/tests/agent_features/test_distributed_tracing.py +++ b/tests/agent_features/test_distributed_tracing.py @@ -419,21 +419,31 @@ def _test_inbound_dt_payload_acceptance(): @pytest.mark.parametrize( - "sampled,remote_parent_sampled,remote_parent_not_sampled,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called", + "traceparent_sampled,newrelic_sampled,remote_parent_sampled,remote_parent_not_sampled,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called", ( - (True, "default", "default", None, None, True), # Uses sampling algo. - (True, "always_on", "default", True, 2, False), # Always sampled. - (True, "always_off", "default", False, 0, False), # Never sampled. 
- (False, "default", "default", None, None, True), # Uses sampling algo. - (False, "always_on", "default", None, None, True), # Uses sampling alog. - (False, "always_off", "default", None, None, True), # Uses sampling algo. - (True, "default", "always_on", None, None, True), # Uses sampling algo. - (True, "default", "always_off", None, None, True), # Uses sampling algo. - (False, "default", "always_on", True, 2, False), # Always sampled. - (False, "default", "always_off", False, 0, False), # Never sampled. + (True, None, "default", "default", None, None, True), # Uses sampling algo. + (True, None, "always_on", "default", True, 2, False), # Always sampled. + (True, None, "always_off", "default", False, 0, False), # Never sampled. + (False, None, "default", "default", None, None, True), # Uses sampling algo. + (False, None, "always_on", "default", None, None, True), # Uses sampling alog. + (False, None, "always_off", "default", None, None, True), # Uses sampling algo. + (True, None, "default", "always_on", None, None, True), # Uses sampling algo. + (True, None, "default", "always_off", None, None, True), # Uses sampling algo. + (False, None, "default", "always_on", True, 2, False), # Always sampled. + (False, None, "default", "always_off", False, 0, False), # Never sampled. + (None, True, "default", "default", None, None, True), # Uses sampling algo. + (None, True, "always_on", "default", True, 2, False), # Always sampled. + (None, True, "always_off", "default", False, 0, False), # Never sampled. + (None, False, "default", "default", None, None, True), # Uses sampling algo. + (None, False, "always_on", "default", None, None, True), # Uses sampling alog. + (None, False, "always_off", "default", None, None, True), # Uses sampling algo. + (None, True, "default", "always_on", None, None, True), # Uses sampling algo. + (None, True, "default", "always_off", None, None, True), # Uses sampling algo. + (None, False, "default", "always_on", True, 2, False), # Always sampled. 
+ (None, False, "default", "always_off", False, 0, False), # Never sampled. ), ) -def test_distributed_trace_w3cparent_sampling_decision( +def test_distributed_trace_remote_parent_sampling_decision( sampled, remote_parent_sampled, remote_parent_not_sampled, @@ -471,10 +481,16 @@ def test_distributed_trace_w3cparent_sampling_decision( def _test(): txn = current_transaction() - headers = { - "traceparent": f"00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-{int(sampled):02x}", - "tracestate": "rojo=f06a0ba902b7,congo=t61rcWkgMzE", - } + if traceparent_sampled is not None: + headers = { + "traceparent": f"00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-{int(traceparent_sampled):02x}", + "tracestate": "33@nr=0-0-33-2827902-7d3efb1b173fecfa-e8b91a159289ff74-1-1.23456-1518469636035" + "newrelic": "{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"123\",\"ap\":\"51424\",\"id\":\"5f474d64b9cc9b2a\",\"tr\":\"6e2fea0b173fdad0\",\"pr\":0.1234,\"sa\":true,\"ti\":1482959525577,\"tx\":\"27856f70d3d314b7\"}}" + } + else: + headers = { + "newrelic": "{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"123\",\"ap\":\"51424\",\"id\":\"5f474d64b9cc9b2a\",\"tr\":\"6e2fea0b173fdad0\",\"pr\":0.1234,\"sa\":%s,\"ti\":1482959525577,\"tx\":\"27856f70d3d314b7\"}}"%(str(newrelic_sampled).lower()) + } accept_distributed_trace_headers(headers) _test() From 670d454429a5c8986170759c3fcbbf5d649022d4 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Tue, 14 Oct 2025 12:24:00 -0700 Subject: [PATCH 4/5] Fix duration calculation --- newrelic/core/node_mixin.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/newrelic/core/node_mixin.py b/newrelic/core/node_mixin.py index 95bb72667c..4e9842c4e5 100644 --- a/newrelic/core/node_mixin.py +++ b/newrelic/core/node_mixin.py @@ -106,12 +106,19 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic # ids is the list of span guids that share this unqiue exit span. 
                     a_minimized_attrs["nr.ids"] = []
                     a_minimized_attrs["nr.durations"] = self.duration
-                    ct_exit_spans[span_attrs] = [a_minimized_attrs]
+                    ct_exit_spans[span_attrs] = [i_attrs, a_minimized_attrs]
                     return [i_attrs, {}, a_minimized_attrs]
                 # If this is an exit span we've already seen, add its guid to the list
                 # of ids on the seen span and return None.
-                ct_exit_spans[span_attrs][0]["nr.ids"].append(self.guid)
-                ct_exit_spans[span_attrs][0]["nr.durations"] += self.duration
+                ct_exit_spans[span_attrs][1]["nr.ids"].append(self.guid)
+                # Compute the new start and end time for all compressed spans and use
+                # that to set the duration for all compressed spans.
+                new_start_time = min(ct_exit_spans[span_attrs][0]["timestamp"], i_attrs["timestamp"])
+                new_end_time = max(i_attrs["timestamp"]/1000 + self.duration, ct_exit_spans[span_attrs][0]["timestamp"]/1000 + ct_exit_spans[span_attrs][1]["nr.durations"])
+                ct_exit_spans[span_attrs][1]["nr.durations"] = new_end_time - new_start_time
+                # Reset the start time of the compressed span to be the start time of
+                # the oldest compressed span.
+                ct_exit_spans[span_attrs][0]["timestamp"] = new_start_time
                 return None

     def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict, partial_granularity_sampled=False, ct_exit_spans=None):

From ec3f7fef02c59dbb4d7a145333304e4696529288 Mon Sep 17 00:00:00 2001
From: Hannah Stepanek
Date: Tue, 14 Oct 2025 15:53:13 -0700
Subject: [PATCH 5/5] Fixup tests

---
 newrelic/api/transaction.py | 9 ++-
 .../test_distributed_tracing.py | 64 +++++++++++--------
 2 files changed, 41 insertions(+), 32 deletions(-)

diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py
index 70206004e2..453ab76b4d 100644
--- a/newrelic/api/transaction.py
+++ b/newrelic/api/transaction.py
@@ -1065,14 +1065,14 @@ def _make_sampling_decision(self):
             remote_parent_not_sampled_setting = self.settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled,
         )
         _logger.debug("Full granularity sampling decision was %s with priority=%s.", sampled, priority)
-        if computed_sampled:
+        if computed_sampled or not self.settings.distributed_tracing.sampler.partial_granularity.enabled:
             self._priority = computed_priority
             self._sampled = computed_sampled
             self._sampling_decision_made = True
             return

         # If full granularity is not going to sample, let partial granularity decide.
-        if self.settings.distributed_tracing.sampler.partial_granularity.enabled:
+        if self.settings.distributed_tracing.sampler.partial_granularity.enabled:
             _logger.debug("Partial granularity tracing is enabled.
Asking if partial granularity wants to sample.") self._priority, self._sampled = self._compute_sampled_and_priority( priority, @@ -1237,7 +1237,6 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): return False try: - self._remote_parent_sampled = payload.get("sa") version = payload.get("v") major_version = version and int(version[0]) @@ -1258,7 +1257,7 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): if not any(k in data for k in ("id", "tx")): self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException") return False - + self._remote_parent_sampled = data.get("sa") settings = self._settings account_id = data.get("ac") trusted_account_key = settings.trusted_account_key or ( @@ -1349,7 +1348,7 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): try: traceparent = ensure_str(traceparent).strip() data = W3CTraceParent.decode(traceparent) - self._remote_parent_sampled = data.get("sa") + self._remote_parent_sampled = data.pop("sa", None) except: data = None diff --git a/tests/agent_features/test_distributed_tracing.py b/tests/agent_features/test_distributed_tracing.py index 6d244c192b..09b987516b 100644 --- a/tests/agent_features/test_distributed_tracing.py +++ b/tests/agent_features/test_distributed_tracing.py @@ -419,34 +419,43 @@ def _test_inbound_dt_payload_acceptance(): @pytest.mark.parametrize( - "traceparent_sampled,newrelic_sampled,remote_parent_sampled,remote_parent_not_sampled,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called", + "traceparent_sampled,newrelic_sampled,remote_parent_sampled_setting,remote_parent_not_sampled_setting,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called", ( - (True, None, "default", "default", None, None, True), # Uses sampling algo. - (True, None, "always_on", "default", True, 2, False), # Always sampled. 
- (True, None, "always_off", "default", False, 0, False), # Never sampled. - (False, None, "default", "default", None, None, True), # Uses sampling algo. - (False, None, "always_on", "default", None, None, True), # Uses sampling alog. - (False, None, "always_off", "default", None, None, True), # Uses sampling algo. - (True, None, "default", "always_on", None, None, True), # Uses sampling algo. - (True, None, "default", "always_off", None, None, True), # Uses sampling algo. - (False, None, "default", "always_on", True, 2, False), # Always sampled. - (False, None, "default", "always_off", False, 0, False), # Never sampled. - (None, True, "default", "default", None, None, True), # Uses sampling algo. + #(True, None, "default", "default", None, None, True), # Uses adaptive sampling algo. + #(True, None, "always_on", "default", True, 2, False), # Always sampled. + #(True, None, "always_off", "default", False, 0, False), # Never sampled. + #(False, None, "default", "default", None, None, True), # Uses adaptive sampling algo. + #(False, None, "always_on", "default", None, None, True), # Uses adaptive sampling alog. + #(False, None, "always_off", "default", None, None, True), # Uses adaptive sampling algo. + #(True, None, "default", "always_on", None, None, True), # Uses adaptive sampling algo. + #(True, None, "default", "always_off", None, None, True), # Uses adaptive sampling algo. + #(False, None, "default", "always_on", True, 2, False), # Always sampled. + #(False, None, "default", "always_off", False, 0, False), # Never sampled. + + #(True, True, "default", "default", True, 1.23456, False), # Uses sampling decision in W3C TraceState header. + #(True, False, "default", "default", False, 1.23456, False), # Uses sampling decision in W3C TraceState header. + #(False, False, "default", "default", False, 1.23456, False), # Uses sampling decision in W3C TraceState header. + #(True, False, "always_on", "default", True, 2, False), # Always sampled. 
+ #(True, True, "always_off", "default", False, 0, False), # Never sampled. + #(False, False, "default", "always_on", True, 2, False), # Always sampled. + #(False, True, "default", "always_off", False, 0, False), # Never sampled. + + #(None, True, "default", "default", True, .1234, False), # Uses sampling and priority from newrelic header. (None, True, "always_on", "default", True, 2, False), # Always sampled. (None, True, "always_off", "default", False, 0, False), # Never sampled. - (None, False, "default", "default", None, None, True), # Uses sampling algo. - (None, False, "always_on", "default", None, None, True), # Uses sampling alog. - (None, False, "always_off", "default", None, None, True), # Uses sampling algo. - (None, True, "default", "always_on", None, None, True), # Uses sampling algo. - (None, True, "default", "always_off", None, None, True), # Uses sampling algo. + (None, False, "default", "default", False, .1234, False), # Uses sampling and priority from newrelic header. + (None, False, "always_on", "default", False, .1234, False), # Uses sampling and priority from newrelic header. + (None, True, "default", "always_on", True, .1234, False), # Uses sampling and priority from newrelic header. (None, False, "default", "always_on", True, 2, False), # Always sampled. (None, False, "default", "always_off", False, 0, False), # Never sampled. + (None, None, "default", "default", None, None, True), # Uses adaptive sampling algo. 
), ) def test_distributed_trace_remote_parent_sampling_decision( - sampled, - remote_parent_sampled, - remote_parent_not_sampled, + traceparent_sampled, + newrelic_sampled, + remote_parent_sampled_setting, + remote_parent_not_sampled_setting, expected_sampled, expected_priority, expected_adaptive_sampling_algo_called, @@ -460,18 +469,18 @@ def test_distributed_trace_remote_parent_sampling_decision( test_settings = _override_settings.copy() test_settings.update( { - "distributed_tracing.sampler.remote_parent_sampled": remote_parent_sampled, - "distributed_tracing.sampler.remote_parent_not_sampled": remote_parent_not_sampled, + "distributed_tracing.sampler.full_granularity.remote_parent_sampled": remote_parent_sampled_setting, + "distributed_tracing.sampler.full_granularity.remote_parent_not_sampled": remote_parent_not_sampled_setting, "span_events.enabled": True, } ) if expected_adaptive_sampling_algo_called: function_called_decorator = validate_function_called( - "newrelic.api.transaction", "Transaction.sampling_algo_compute_sampled_and_priority" + "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled" ) else: function_called_decorator = validate_function_not_called( - "newrelic.api.transaction", "Transaction.sampling_algo_compute_sampled_and_priority" + "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled" ) @function_called_decorator @@ -484,12 +493,13 @@ def _test(): if traceparent_sampled is not None: headers = { "traceparent": f"00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-{int(traceparent_sampled):02x}", - "tracestate": "33@nr=0-0-33-2827902-7d3efb1b173fecfa-e8b91a159289ff74-1-1.23456-1518469636035" - "newrelic": "{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"123\",\"ap\":\"51424\",\"id\":\"5f474d64b9cc9b2a\",\"tr\":\"6e2fea0b173fdad0\",\"pr\":0.1234,\"sa\":true,\"ti\":1482959525577,\"tx\":\"27856f70d3d314b7\"}}" + "newrelic": 
"{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"123\",\"ap\":\"51424\",\"id\":\"5f474d64b9cc9b2a\",\"tr\":\"6e2fea0b173fdad0\",\"pr\":0.1234,\"sa\":true,\"ti\":1482959525577,\"tx\":\"27856f70d3d314b7\"}}" # This header should be ignored. } + if newrelic_sampled is not None: + headers["tracestate"] = f"1@nr=0-0-1-2827902-0af7651916cd43dd-00f067aa0ba902b7-{int(newrelic_sampled)}-1.23456-1518469636035" else: headers = { - "newrelic": "{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"123\",\"ap\":\"51424\",\"id\":\"5f474d64b9cc9b2a\",\"tr\":\"6e2fea0b173fdad0\",\"pr\":0.1234,\"sa\":%s,\"ti\":1482959525577,\"tx\":\"27856f70d3d314b7\"}}"%(str(newrelic_sampled).lower()) + "newrelic": "{\"v\":[0,1],\"d\":{\"ty\":\"Mobile\",\"ac\":\"1\",\"ap\":\"51424\",\"id\":\"00f067aa0ba902b7\",\"tr\":\"0af7651916cd43dd8448eb211c80319c\",\"pr\":0.1234,\"sa\":%s,\"ti\":1482959525577,\"tx\":\"0af7651916cd43dd\"}}"%(str(newrelic_sampled).lower()) } accept_distributed_trace_headers(headers)