Skip to content

Commit 4afcfee

Browse files
authored
Merge pull request #76 from dynatrace-oss/LOG-6790-Improve-logs-in-case-of-config-parsing-failure
Do not log entire stack trace for incorrect rules
2 parents 329df9a + d9d0a08 commit 4afcfee

File tree

4 files changed: +16 additions, −8 deletions

4 files changed: +16 additions, −8 deletions

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,7 @@ samconfig.toml
253253
.vscode/settings.json
254254

255255
config
256+
.idea
256257
.dev
257258
.tmp
258259
dev/env_vars.cfg

src/log/forwarding/log_forwarding_rules.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -189,8 +189,7 @@ def load_forwarding_rules_from_local_folder():
189189
log_forwarding_rules[bucket_name][rule['name']] = rule_obj
190190
except IncorrectLogForwardingRuleFormat as ex:
191191
logger.warning(
192-
'Skipping incorrect log forwarding rule: %s in %s', rule, rule_file.name)
193-
logger.error(ex)
192+
'Skipping incorrect log forwarding rule: %s in %s; %s', rule, rule_file.name, ex.message)
194193
continue
195194

196195
except InvalidLogForwardingRuleFile as ex:

src/log/processing/processing.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -149,6 +149,9 @@ def process_log_object(log_processing_rule: LogProcessingRule, bucket: str, key:
149149
context_log_attributes.update(
150150
log_processing_rule.get_processing_log_annotations())
151151

152+
for log_sink in log_sinks:
153+
log_sink.set_s3_source(bucket, key)
154+
152155
# Count log entries (can't len() a stream)
153156
num_log_entries = 0
154157
decompressed_log_object_size = 0

src/log/sinks/dynatrace.py

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,7 @@ def __init__(self, dt_url: str, dt_api_key_parameter: str, verify_ssl: bool = Tr
6969
self._approx_buffered_messages_size = LIST_BRACKETS_LENGTH
7070
self._messages = []
7171
self._batch_num = 1
72+
self._s3_source = ""
7273

7374
retry_strategy = Retry(
7475
total = 3,
@@ -96,6 +97,9 @@ def is_empty(self):
9697
def get_environment_url(self):
9798
return self._environment_url
9899

100+
def set_s3_source(self, bucket: str, key: str):
101+
self._s3_source = f"{bucket}/{key}"
102+
99103
def push(self, message: dict):
100104
# Validate that the message size doesn't reach DT limits. If so,
101105
# truncate the "content" field.
@@ -128,6 +132,7 @@ def empty_sink(self):
128132
self._messages = []
129133
self._approx_buffered_messages_size = LIST_BRACKETS_LENGTH
130134
self._batch_num = 1
135+
self._s3_source = ""
131136

132137
def check_log_message_size_and_truncate(self, message: dict):
133138
'''
@@ -211,28 +216,28 @@ def ingest_logs(self, logs: list, session=None,
211216
unit=MetricUnit.Count, value=1)
212217
elif resp.status_code == 200:
213218
logger.warning(
214-
'%s: Parts of batch %s were not successfully posted: %s',tenant_id, batch_num, resp.text)
219+
'%s: Parts of batch %s were not successfully posted: %s. Source file: %s',tenant_id, batch_num, resp.text, self._s3_source)
215220
metrics.add_metric(
216221
name='DynatraceHTTP200PartialSuccess', unit=MetricUnit.Count, value=1)
217222
elif resp.status_code == 400:
218223
logger.warning(
219-
'%s: Parts of batch %s were not successfully posted: %s',tenant_id, batch_num, resp.text)
224+
'%s: Parts of batch %s were not successfully posted: %s. Source file: %s',tenant_id, batch_num, resp.text, self._s3_source)
220225
metrics.add_metric(
221226
name='DynatraceHTTP400InvalidLogEntries', unit=MetricUnit.Count, value=1)
222227
elif resp.status_code == 429:
223-
logger.error("%s: Throttled by Dynatrace. Exhausted retry attempts...", tenant_id)
228+
logger.error("%s: Throttled by Dynatrace. Exhausted retry attempts... Source file: %s", tenant_id, self._s3_source)
224229
metrics.add_metric(name='DynatraceHTTP429Throttled',unit=MetricUnit.Count, value=1)
225230
metrics.add_metric(name='DynatraceHTTPErrors', unit=MetricUnit.Count, value=1)
226231
raise DynatraceThrottlingException
227232
elif resp.status_code == 503:
228-
logger.error("%s: Usable space limit reached. Exhausted retry attempts...",tenant_id)
233+
logger.error("%s: Usable space limit reached. Exhausted retry attempts... Source file: %s", tenant_id, self._s3_source)
229234
metrics.add_metric(name='DynatraceHTTP503SpaceLimitReached',unit=MetricUnit.Count, value=1)
230235
metrics.add_metric(name='DynatraceHTTPErrors', unit=MetricUnit.Count, value=1)
231236
raise DynatraceThrottlingException
232237
else:
233238
logger.error(
234-
"%s: There was a HTTP %d error posting batch %d to Dynatrace. %s",
235-
tenant_id,resp.status_code, batch_num, resp.text)
239+
"%s: There was a HTTP %d error posting batch %d to Dynatrace. %s. Source file: %s",
240+
tenant_id,resp.status_code, batch_num, resp.text, self._s3_source)
236241
metrics.add_metric(name='DynatraceHTTPErrors',
237242
unit=MetricUnit.Count, value=1)
238243
raise DynatraceIngestionException

0 commit comments

Comments (0)