|
11 | 11 | # See the License for the specific language governing permissions and |
12 | 12 | # limitations under the License. |
13 | 13 |
|
14 | | -import logging |
| 14 | +from logging import getLogger |
15 | 15 | from os import environ |
16 | 16 | from typing import Optional, Sequence |
17 | 17 | from grpc import ChannelCredentials, Compression |
|
40 | 40 | from opentelemetry.sdk._metrics.export import ( |
41 | 41 | MetricExporter, |
42 | 42 | MetricExportResult, |
| 43 | + MetricsData, |
43 | 44 | ) |
44 | 45 |
|
45 | | -logger = logging.getLogger(__name__) |
| 46 | +_logger = getLogger(__name__) |
46 | 47 |
|
47 | 48 |
|
48 | 49 | class OTLPMetricExporter( |
@@ -79,103 +80,127 @@ def __init__( |
79 | 80 | ) |
80 | 81 |
|
81 | 82 | def _translate_data( |
82 | | - self, data: Sequence[Metric] |
| 83 | + self, data: MetricsData |
83 | 84 | ) -> ExportMetricsServiceRequest: |
84 | | - sdk_resource_scope_metrics = {} |
85 | | - |
86 | | - for metric in data: |
87 | | - resource = metric.resource |
88 | | - scope_map = sdk_resource_scope_metrics.get(resource, {}) |
89 | | - if not scope_map: |
90 | | - sdk_resource_scope_metrics[resource] = scope_map |
91 | | - |
92 | | - scope_metrics = scope_map.get(metric.instrumentation_scope) |
93 | | - |
94 | | - if not scope_metrics: |
95 | | - if metric.instrumentation_scope is not None: |
96 | | - scope_map[metric.instrumentation_scope] = pb2.ScopeMetrics( |
97 | | - scope=InstrumentationScope( |
98 | | - name=metric.instrumentation_scope.name, |
99 | | - version=metric.instrumentation_scope.version, |
100 | | - ) |
101 | | - ) |
102 | | - else: |
103 | | - scope_map[ |
104 | | - metric.instrumentation_scope |
105 | | - ] = pb2.ScopeMetrics() |
106 | 85 |
|
107 | | - scope_metrics = scope_map.get(metric.instrumentation_scope) |
| 86 | + resource_metrics_dict = {} |
108 | 87 |
|
109 | | - pbmetric = pb2.Metric( |
110 | | - name=metric.name, |
111 | | - description=metric.description, |
112 | | - unit=metric.unit, |
113 | | - ) |
114 | | - if isinstance(metric.point, Gauge): |
115 | | - pt = pb2.NumberDataPoint( |
116 | | - attributes=self._translate_attributes(metric.attributes), |
117 | | - time_unix_nano=metric.point.time_unix_nano, |
118 | | - ) |
119 | | - if isinstance(metric.point.value, int): |
120 | | - pt.as_int = metric.point.value |
121 | | - else: |
122 | | - pt.as_double = metric.point.value |
123 | | - pbmetric.gauge.data_points.append(pt) |
124 | | - elif isinstance(metric.point, Histogram): |
125 | | - pt = pb2.HistogramDataPoint( |
126 | | - attributes=self._translate_attributes(metric.attributes), |
127 | | - time_unix_nano=metric.point.time_unix_nano, |
128 | | - start_time_unix_nano=metric.point.start_time_unix_nano, |
129 | | - count=sum(metric.point.bucket_counts), |
130 | | - sum=metric.point.sum, |
131 | | - bucket_counts=metric.point.bucket_counts, |
132 | | - explicit_bounds=metric.point.explicit_bounds, |
133 | | - ) |
134 | | - pbmetric.histogram.aggregation_temporality = ( |
135 | | - metric.point.aggregation_temporality |
136 | | - ) |
137 | | - pbmetric.histogram.data_points.append(pt) |
138 | | - elif isinstance(metric.point, Sum): |
139 | | - pt = pb2.NumberDataPoint( |
140 | | - attributes=self._translate_attributes(metric.attributes), |
141 | | - start_time_unix_nano=metric.point.start_time_unix_nano, |
142 | | - time_unix_nano=metric.point.time_unix_nano, |
143 | | - ) |
144 | | - if isinstance(metric.point.value, int): |
145 | | - pt.as_int = metric.point.value |
146 | | - else: |
147 | | - pt.as_double = metric.point.value |
148 | | - # note that because sum is a message type, the fields must be |
149 | | - # set individually rather than instantiating a pb2.Sum and setting |
150 | | - # it once |
151 | | - pbmetric.sum.aggregation_temporality = ( |
152 | | - metric.point.aggregation_temporality |
| 88 | + for resource_metrics in data.resource_metrics: |
| 89 | + |
| 90 | + resource = resource_metrics.resource |
| 91 | + |
| 92 | + # It is safe to assume that each entry in data.resource_metrics is |
| 93 | +            # associated with a unique resource. |
| 94 | + scope_metrics_dict = {} |
| 95 | + |
| 96 | + resource_metrics_dict[resource] = scope_metrics_dict |
| 97 | + |
| 98 | + for scope_metrics in resource_metrics.scope_metrics: |
| 99 | + |
| 100 | + instrumentation_scope = scope_metrics.scope |
| 101 | + |
| 102 | + # The SDK groups metrics in instrumentation scopes already so |
| 103 | + # there is no need to check for existing instrumentation scopes |
| 104 | + # here. |
| 105 | + pb2_scope_metrics = pb2.ScopeMetrics( |
| 106 | + scope=InstrumentationScope( |
| 107 | + name=instrumentation_scope.name, |
| 108 | + version=instrumentation_scope.version, |
| 109 | + ) |
153 | 110 | ) |
154 | | - pbmetric.sum.is_monotonic = metric.point.is_monotonic |
155 | | - pbmetric.sum.data_points.append(pt) |
156 | | - else: |
157 | | - logger.warn("unsupported datapoint type %s", metric.point) |
158 | | - continue |
159 | | - |
160 | | - scope_metrics.metrics.append( |
161 | | - pbmetric, |
162 | | - ) |
| 111 | + |
| 112 | + scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics |
| 113 | + |
| 114 | + for metric in scope_metrics.metrics: |
| 115 | + pb2_metric = pb2.Metric( |
| 116 | + name=metric.name, |
| 117 | + description=metric.description, |
| 118 | + unit=metric.unit, |
| 119 | + ) |
| 120 | + |
| 121 | + if isinstance(metric.data, Gauge): |
| 122 | + for data_point in metric.data.data_points: |
| 123 | + pt = pb2.NumberDataPoint( |
| 124 | + attributes=self._translate_attributes( |
| 125 | + data_point.attributes |
| 126 | + ), |
| 127 | + time_unix_nano=data_point.time_unix_nano, |
| 128 | + ) |
| 129 | + if isinstance(data_point.value, int): |
| 130 | + pt.as_int = data_point.value |
| 131 | + else: |
| 132 | + pt.as_double = data_point.value |
| 133 | + pb2_metric.gauge.data_points.append(pt) |
| 134 | + |
| 135 | + elif isinstance(metric.data, Histogram): |
| 136 | + for data_point in metric.data.data_points: |
| 137 | + pt = pb2.HistogramDataPoint( |
| 138 | + attributes=self._translate_attributes( |
| 139 | + data_point.attributes |
| 140 | + ), |
| 141 | + time_unix_nano=data_point.time_unix_nano, |
| 142 | + start_time_unix_nano=( |
| 143 | + data_point.start_time_unix_nano |
| 144 | + ), |
| 145 | + count=data_point.count, |
| 146 | + sum=data_point.sum, |
| 147 | + bucket_counts=data_point.bucket_counts, |
| 148 | + explicit_bounds=data_point.explicit_bounds, |
| 149 | + ) |
| 150 | + pb2_metric.histogram.aggregation_temporality = ( |
| 151 | + metric.data.aggregation_temporality |
| 152 | + ) |
| 153 | + pb2_metric.histogram.data_points.append(pt) |
| 154 | + |
| 155 | + elif isinstance(metric.data, Sum): |
| 156 | + for data_point in metric.data.data_points: |
| 157 | + pt = pb2.NumberDataPoint( |
| 158 | + attributes=self._translate_attributes( |
| 159 | + data_point.attributes |
| 160 | + ), |
| 161 | + start_time_unix_nano=( |
| 162 | + data_point.start_time_unix_nano |
| 163 | + ), |
| 164 | + time_unix_nano=data_point.time_unix_nano, |
| 165 | + ) |
| 166 | + if isinstance(data_point.value, int): |
| 167 | + pt.as_int = data_point.value |
| 168 | + else: |
| 169 | + pt.as_double = data_point.value |
| 170 | + # note that because sum is a message type, the |
| 171 | + # fields must be set individually rather than |
| 172 | + # instantiating a pb2.Sum and setting it once |
| 173 | + pb2_metric.sum.aggregation_temporality = ( |
| 174 | + metric.data.aggregation_temporality |
| 175 | + ) |
| 176 | + pb2_metric.sum.is_monotonic = ( |
| 177 | + metric.data.is_monotonic |
| 178 | + ) |
| 179 | + pb2_metric.sum.data_points.append(pt) |
| 180 | + else: |
| 181 | +                    _logger.warning( |
| 182 | +                        "unsupported datapoint type %s", metric.data |
| 183 | +                    ) |
| 184 | + continue |
| 185 | + |
| 186 | + pb2_scope_metrics.metrics.append(pb2_metric) |
| 187 | + |
163 | 188 | return ExportMetricsServiceRequest( |
164 | 189 | resource_metrics=get_resource_data( |
165 | | - sdk_resource_scope_metrics, |
| 190 | + resource_metrics_dict, |
166 | 191 | pb2.ResourceMetrics, |
167 | 192 | "metrics", |
168 | 193 | ) |
169 | 194 | ) |
170 | 195 |
|
171 | 196 | def export( |
172 | 197 | self, |
173 | | - metrics: Sequence[Metric], |
| 198 | + metrics_data: MetricsData, |
174 | 199 | timeout_millis: float = 10_000, |
175 | 200 | **kwargs, |
176 | 201 | ) -> MetricExportResult: |
177 | 202 | # TODO(#2663): OTLPExporterMixin should pass timeout to gRPC |
178 | | - return self._export(metrics) |
| 203 | + return self._export(metrics_data) |
179 | 204 |
|
180 | 205 | def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None: |
181 | 206 | pass |
0 commit comments