fix: log level has no impact on integration error telemetry #13045

Draft · wants to merge 1 commit into base: main
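Replaces direct log.debug / log.warning calls in integration error paths with telemetry_writer.add_integration_error_log, so these errors reach error telemetry regardless of the configured log level. A minimal sketch of the pattern, calling the helper exactly as it is used throughout this diff (the surrounding _decode_header function is hypothetical, for illustration only):

from ddtrace.internal.telemetry import telemetry_writer


def _decode_header(value: bytes) -> str:
    # Hypothetical call site; the except block mirrors the change applied in this PR.
    try:
        return value.decode("ascii")
    except UnicodeDecodeError as e:
        # Previously: log.debug("failed to decode header", exc_info=True)
        # Now the exception is forwarded to integration error telemetry;
        # warning=True is passed where the original call was log.warning.
        telemetry_writer.add_integration_error_log("failed to decode header", e)
        return ""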
9 changes: 5 additions & 4 deletions ddtrace/contrib/dbapi.py
@@ -8,6 +8,7 @@
from ddtrace.internal import core
from ddtrace.internal.constants import COMPONENT
from ddtrace.internal.logger import get_logger
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.utils import ArgumentError
from ddtrace.internal.utils import get_argument_value
from ddtrace.settings.asm import config as asm_config
@@ -113,8 +114,8 @@ def _trace_method(self, method, name, resource, extra_tags, dbm_propagator, *arg
_set_metric_iast_executed_sink(SqlInjection.vulnerability_type)
if check_tainted_dbapi_args(args, kwargs, pin.tracer, self._self_config.integration_name, method):
SqlInjection.report(evidence_value=args[0], dialect=self._self_config.integration_name)
except Exception:
log.debug("Unexpected exception while reporting vulnerability", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("Unexpected exception while reporting vulnerability", e)

# set analytics sample rate if enabled but only for non-FetchTracedCursor
if not isinstance(self, FetchTracedCursor):
@@ -344,8 +345,8 @@ def _get_vendor(conn):
"""
try:
name = _get_module_name(conn)
except Exception:
log.debug("couldn't parse module name", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("couldn't parse module name", e)
name = "sql"
return sql.normalize_vendor(name)

7 changes: 5 additions & 2 deletions ddtrace/contrib/internal/anthropic/_streaming.py
@@ -9,6 +9,7 @@

from ddtrace.contrib.internal.anthropic.utils import tag_tool_use_output_on_span
from ddtrace.internal.logger import get_logger
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.llmobs._utils import _get_attr


@@ -157,8 +158,10 @@ def _process_finished_stream(integration, span, args, kwargs, streamed_chunks):
if integration.is_pc_sampled_span(span):
_tag_streamed_chat_completion_response(integration, span, resp_message)
integration.llmobs_set_tags(span, args=[], kwargs=kwargs, response=resp_message)
except Exception:
log.warning("Error processing streamed completion/chat response.", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log(
"Error processing streamed completion/chat response", e, warning=True
)


def _construct_message(streamed_chunks):
21 changes: 12 additions & 9 deletions ddtrace/contrib/internal/asgi/middleware.py
@@ -21,6 +21,7 @@
from ddtrace.internal.logger import get_logger
from ddtrace.internal.schema import schematize_url_operation
from ddtrace.internal.schema.span_attribute_schema import SpanDirection
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.utils import get_blocked
from ddtrace.internal.utils import set_blocked
from ddtrace.trace import Span
@@ -98,8 +99,8 @@ def _parse_response_cookies(response_headers):
if len(result) == 2:
cookie_key, cookie_value = result
cookies[cookie_key] = cookie_value
except Exception:
log.debug("failed to extract response cookies", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("failed to extract response cookies", e)
return cookies


@@ -136,8 +137,10 @@ async def __call__(self, scope, receive, send):
return await self.app(scope, receive, send)
try:
headers = _extract_headers(scope)
except Exception:
log.warning("failed to decode headers for distributed tracing", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log(
"failed to decode headers for distributed tracing", e, warning=True
)
headers = {}
else:
trace_utils.activate_distributed_headers(
@@ -191,9 +194,9 @@ async def __call__(self, scope, receive, send):
if key.encode() == b"host":
try:
host_header = value
except UnicodeDecodeError:
log.warning(
"failed to decode host header, host from http headers will not be considered", exc_info=True
except UnicodeDecodeError as e:
telemetry_writer.add_integration_error_log(
"failed to decode host header, host from http headers will not be considered", e
)
break
method = scope.get("method")
@@ -246,8 +249,8 @@ async def wrapped_send(message):
async def wrapped_send(message):
try:
response_headers = _extract_headers(message)
except Exception:
log.warning("failed to extract response headers", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("failed to extract response headers", e, warning=True)
response_headers = None
if span and message.get("type") == "http.response.start" and "status" in message:
cookies = _parse_response_cookies(response_headers)
17 changes: 10 additions & 7 deletions ddtrace/contrib/internal/botocore/patch.py
@@ -29,6 +29,7 @@
from ddtrace.internal.schema import schematize_cloud_faas_operation
from ddtrace.internal.schema import schematize_cloud_messaging_operation
from ddtrace.internal.schema import schematize_service_name
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.utils import get_argument_value
from ddtrace.internal.utils.formats import asbool
from ddtrace.internal.utils.formats import deep_getattr
@@ -72,19 +73,21 @@ def _load_dynamodb_primary_key_names_for_tables() -> Dict[str, Set[str]]:
if not isinstance(table, str):
raise ValueError(f"expected string table name: {table}")

if not isinstance(primary_keys, list):
raise ValueError(f"expected list of primary keys: {primary_keys}")
if not isinstance(primary_keys, list):
raise ValueError(f"expected list of primary keys: {primary_keys}")

unique_primary_keys = set(primary_keys)
if not len(unique_primary_keys) == len(primary_keys):
raise ValueError(f"expected unique primary keys: {primary_keys}")
unique_primary_keys = set(primary_keys)
if not len(unique_primary_keys) == len(primary_keys):
raise ValueError(f"expected unique primary keys: {primary_keys}")

table_primary_keys[table] = unique_primary_keys
table_primary_keys[table] = unique_primary_keys

return table_primary_keys

except Exception as e:
log.warning("failed to load DD_BOTOCORE_DYNAMODB_TABLE_PRIMARY_KEYS: %s", e)
telemetry_writer.add_integration_error_log(
"failed to load DD_BOTOCORE_DYNAMODB_TABLE_PRIMARY_KEYS", e, warning=True
)
return {}


17 changes: 13 additions & 4 deletions ddtrace/contrib/internal/botocore/services/bedrock.py
@@ -13,6 +13,7 @@
from ddtrace.internal import core
from ddtrace.internal.logger import get_logger
from ddtrace.internal.schema import schematize_service_name
from ddtrace.internal.telemetry import telemetry_writer


log = get_logger(__name__)
@@ -268,8 +269,12 @@ def _extract_text_and_response_reason(ctx: core.ExecutionContext, body: Dict[str
elif provider == _STABILITY:
# TODO: request/response formats are different for image-based models. Defer for now
pass
except (IndexError, AttributeError, TypeError):
log.warning("Unable to extract text/finish_reason from response body. Defaulting to empty text/finish_reason.")
except (IndexError, AttributeError, TypeError) as e:
telemetry_writer.add_integration_error_log(
"Unable to extract text/finish_reason from response body. Defaulting to empty text/finish_reason.",
e,
warning=True,
)

if not isinstance(text, list):
text = [text]
@@ -323,8 +328,12 @@ def _extract_streamed_response(ctx: core.ExecutionContext, streamed_body: List[D
finish_reason = streamed_body[-1]["stop_reason"]
elif provider == _STABILITY:
pass # DEV: we do not yet support image modality models
except (IndexError, AttributeError):
log.warning("Unable to extract text/finish_reason from response body. Defaulting to empty text/finish_reason.")
except (IndexError, AttributeError) as e:
telemetry_writer.add_integration_error_log(
"Unable to extract text/finish_reason from response body. Defaulting to empty text/finish_reason.",
e,
warning=True,
)

if not isinstance(text, list):
text = [text]
5 changes: 3 additions & 2 deletions ddtrace/contrib/internal/botocore/services/kinesis.py
@@ -17,6 +17,7 @@
from ddtrace.internal.schema import schematize_cloud_messaging_operation
from ddtrace.internal.schema import schematize_service_name
from ddtrace.internal.schema.span_attribute_schema import SpanDirection
from ddtrace.internal.telemetry import telemetry_writer

from ..utils import extract_DD_json
from ..utils import get_kinesis_data_object
@@ -43,8 +44,8 @@ def update_record(ctx, record: Dict[str, Any], stream: str, inject_trace_context

try:
data_json = json.dumps(data_obj)
except Exception:
log.warning("Unable to update kinesis record", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("Unable to update kinesis record", e, warning=True)

if line_break is not None:
data_json += line_break
5 changes: 3 additions & 2 deletions ddtrace/contrib/internal/botocore/services/stepfunctions.py
@@ -13,6 +13,7 @@
from ddtrace.internal.schema import SpanDirection
from ddtrace.internal.schema import schematize_cloud_messaging_operation
from ddtrace.internal.schema import schematize_service_name
from ddtrace.internal.telemetry import telemetry_writer


log = get_logger(__name__)
@@ -27,8 +28,8 @@ def update_stepfunction_input(ctx: core.ExecutionContext, params: Any) -> None:
if isinstance(input_obj, str):
try:
input_obj = json.loads(params["input"])
except ValueError:
log.warning("Input is not a valid JSON string")
except ValueError as e:
telemetry_writer.add_integration_error_log("Input is not a valid JSON string", e, warning=True)
return

if not isinstance(input_obj, dict) or "_datadog" in input_obj:
41 changes: 25 additions & 16 deletions ddtrace/contrib/internal/botocore/utils.py
@@ -12,6 +12,7 @@
from ddtrace.internal import core
from ddtrace.internal.core import ExecutionContext
from ddtrace.internal.logger import get_logger
from ddtrace.internal.telemetry import telemetry_writer


log = get_logger(__name__)
@@ -42,22 +43,26 @@ def get_kinesis_data_object(data: str) -> Tuple[Optional[str], Optional[Dict[str
# check if data is a json string
try:
return get_json_from_str(data)
except Exception:
log.debug("Kinesis data is not a JSON string. Trying Byte encoded JSON string.")
except Exception as e:
telemetry_writer.add_integration_error_log(
"Kinesis data is not a JSON string. Trying Byte encoded JSON string", e
)

# check if data is an encoded json string
try:
data_str = data.decode("ascii")
return get_json_from_str(data_str)
except Exception:
log.debug("Kinesis data is not a JSON string encoded. Trying Base64 encoded JSON string.")
except Exception as e:
telemetry_writer.add_integration_error_log(
"Kinesis data is not a JSON string encoded. Trying Base64 encoded JSON string", e
)

# check if data is a base64 encoded json string
try:
data_str = base64.b64decode(data).decode("ascii")
return get_json_from_str(data_str)
except Exception:
log.debug("Unable to parse payload, unable to inject trace context.")
except Exception as e:
telemetry_writer.add_integration_error_log("Unable to parse payload, unable to inject trace context", e)

return None, None

@@ -73,8 +78,8 @@ def update_eventbridge_detail(ctx: ExecutionContext) -> None:
if "Detail" in entry:
try:
detail = json.loads(entry["Detail"])
except ValueError:
log.warning("Detail is not a valid JSON string")
except ValueError as e:
telemetry_writer.add_integration_error_log("Detail is not a valid JSON string", e, warning=True)
continue

detail["_datadog"] = {}
@@ -99,14 +104,18 @@ def update_client_context(ctx: ExecutionContext) -> None:
try:
client_context_json = base64.b64decode(params["ClientContext"]).decode("utf-8")
client_context_object = json.loads(client_context_json)
except Exception:
log.warning("malformed client_context=%s", params["ClientContext"], exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log(
"malformed client_context=%s" % params["ClientContext"], e, warning=True
)
return
modify_client_context(client_context_object, trace_headers)
try:
json_context = json.dumps(client_context_object).encode("utf-8")
except Exception:
log.warning("unable to encode modified client context as json: %s", client_context_object, exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log(
"unable to encode modified client context as json: %s" % client_context_object, e, warning=True
)
return
params["ClientContext"] = base64.b64encode(json_context).decode("utf-8")

@@ -159,8 +168,8 @@ def extract_DD_json(message):
try:
body = json.loads(message["Body"])
return extract_DD_json(body)
except ValueError:
log.debug("Unable to parse AWS message body.")
except Exception:
log.debug("Unable to parse AWS message attributes for Datadog Context.")
except ValueError as e:
telemetry_writer.add_integration_error_log("Unable to parse AWS message body", e)
except Exception as e:
telemetry_writer.add_integration_error_log("Unable to parse AWS message attributes for Datadog Context", e)
return context_json
11 changes: 7 additions & 4 deletions ddtrace/contrib/internal/cassandra/session.py
@@ -36,6 +36,7 @@
from ddtrace.internal.logger import get_logger
from ddtrace.internal.schema import schematize_database_operation
from ddtrace.internal.schema import schematize_service_name
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.utils import get_argument_value
from ddtrace.internal.utils.formats import deep_getattr
from ddtrace.trace import Pin
@@ -86,8 +87,8 @@ def _close_span_on_success(result, future):
return
try:
span.set_tags(_extract_result_metas(cassandra_cluster.ResultSet(future, result)))
except Exception:
log.debug("an exception occurred while setting tags", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("an exception occurred while setting tags", e)
finally:
span.finish()
delattr(future, CURRENT_SPAN)
@@ -110,8 +111,10 @@ def _close_span_on_error(exc, future):
span.error = 1
span.set_tag_str(ERROR_MSG, exc.args[0])
span.set_tag_str(ERROR_TYPE, exc.__class__.__name__)
except Exception:
log.debug("traced_set_final_exception was not able to set the error, failed with error", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log(
"traced_set_final_exception was not able to set the error, failed with error", e
)
finally:
span.finish()
delattr(future, CURRENT_SPAN)
7 changes: 5 additions & 2 deletions ddtrace/contrib/internal/coverage/patch.py
@@ -4,6 +4,7 @@
from ddtrace.contrib.internal.coverage.data import _coverage_data
from ddtrace.contrib.internal.coverage.utils import is_coverage_loaded
from ddtrace.internal.logger import get_logger
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.utils.wrappers import unwrap as _u


@@ -59,5 +60,7 @@ def run_coverage_report():
try:
current_coverage_object = coverage.Coverage.current()
_coverage_data[PCT_COVERED_KEY] = current_coverage_object.report()
except Exception:
log.warning("An exception occurred when running a coverage report")
except Exception as e:
telemetry_writer.add_integration_error_log(
"An exception occurred when running a coverage report", e, warning=True
)
5 changes: 3 additions & 2 deletions ddtrace/contrib/internal/dd_trace_api/patch.py
@@ -11,6 +11,7 @@

import ddtrace
from ddtrace.internal.logger import get_logger
from ddtrace.internal.telemetry import telemetry_writer
from ddtrace.internal.wrapping.context import WrappingContext


@@ -45,8 +46,8 @@ def __return__(self, value: T) -> T:
"""Always return the original value no matter what our instrumentation does"""
try:
self._handle_return()
except Exception: # noqa: E722
log.debug("Error handling instrumentation return", exc_info=True)
except Exception as e:
telemetry_writer.add_integration_error_log("Error handling instrumentation return", e)

return value
