|
3 | 3 | from typing import Dict |
4 | 4 | from typing import List |
5 | 5 | from typing import NamedTuple |
| 6 | +from typing import Optional |
6 | 7 |
|
7 | 8 | from ddtrace._trace._span_pointer import _SpanPointerDescription |
8 | 9 | from ddtrace._trace._span_pointer import _SpanPointerDirection |
@@ -94,47 +95,60 @@ def _extract_span_pointers_for_s3_response_with_helper( |
94 | 95 | record_span_pointer_calculation_issue(operation=f"S3.{operation_name}", issue_tag="request_parameters") |
95 | 96 | return [] |
96 | 97 |
|
97 | | - try: |
98 | | - return [ |
99 | | - _aws_s3_object_span_pointer_description( |
100 | | - pointer_direction=_SpanPointerDirection.DOWNSTREAM, |
101 | | - bucket=bucket, |
102 | | - key=key, |
103 | | - etag=etag, |
104 | | - ) |
105 | | - ] |
106 | | - except Exception as e: |
107 | | - log.debug( |
108 | | - "failed to generate S3.%s span pointer: %s", |
109 | | - operation_name, |
110 | | - str(e), |
111 | | - ) |
112 | | - record_span_pointer_calculation_issue(operation=f"S3.{operation_name}", issue_tag="calculation") |
| 98 | + span_pointer_description = _aws_s3_object_span_pointer_description( |
| 99 | + operation=f"S3.{operation_name}", |
| 100 | + pointer_direction=_SpanPointerDirection.DOWNSTREAM, |
| 101 | + bucket=bucket, |
| 102 | + key=key, |
| 103 | + etag=etag, |
| 104 | + ) |
| 105 | + if span_pointer_description is None: |
113 | 106 | return [] |
114 | 107 |
|
| 108 | + return [span_pointer_description] |
| 109 | + |
115 | 110 |
|
def _aws_s3_object_span_pointer_description(
    operation: str,
    pointer_direction: _SpanPointerDirection,
    bucket: str,
    key: str,
    etag: str,
) -> Optional[_SpanPointerDescription]:
    """Build a span pointer description for an S3 object.

    Delegates hash computation to ``_aws_s3_object_span_pointer_hash``; if
    that fails (returns ``None``), propagate ``None`` so the caller can skip
    emitting a pointer. ``operation`` is used only for telemetry tagging.
    """
    computed_hash = _aws_s3_object_span_pointer_hash(operation, bucket, key, etag)
    return (
        None
        if computed_hash is None
        else _SpanPointerDescription(
            pointer_kind="aws.s3.object",
            pointer_direction=pointer_direction,
            pointer_hash=computed_hash,
            extra_attributes={},
        )
    )
128 | 128 |
|
129 | 129 |
|
def _aws_s3_object_span_pointer_hash(operation: str, bucket: str, key: str, etag: str) -> Optional[str]:
    """Compute the span pointer hash for an S3 object, or ``None`` on failure.

    Failures (a double-quoted ETag, or an encoding/hashing error) are logged
    at debug level and recorded via telemetry rather than raised, so span
    pointer calculation never breaks the instrumented request.
    """
    # Some AWS API endpoints put the ETag in double quotes. We expect the
    # calling code to have correctly fixed this already.
    if '"' in etag:
        log.debug("ETag should not have double quotes: %s", etag)
        record_span_pointer_calculation_issue(operation=operation, issue_tag="etag_quotes")
        return None

    try:
        # Bucket names and ETags are ASCII; object keys may be any UTF-8.
        components = (
            bucket.encode("ascii"),
            key.encode("utf-8"),
            etag.encode("ascii"),
        )
        return _standard_hashing_function(*components)
    except Exception as exc:
        log.debug("failed to hash S3 object span pointer: %s", exc)
        record_span_pointer_calculation_issue(operation=operation, issue_tag="hashing")
        return None
0 commit comments