github.com/alibaba/ilogtail/pkg@v0.0.0-20250526110833-c53b480d046c/protocol/decoder/opentelemetry/decoder_test.go (about) 1 // Copyright 2021 iLogtail Authors 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // 7 // http://www.apache.org/licenses/LICENSE-2.0 8 // 9 // Unless required by applicable law or agreed to in writing, software 10 // distributed under the License is distributed on an "AS IS" BASIS, 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 // See the License for the specific language governing permissions and 13 // limitations under the License. 14 15 package opentelemetry 16 17 import ( 18 "encoding/json" 19 "fmt" 20 "math" 21 "net/http" 22 "strconv" 23 "testing" 24 "time" 25 26 "github.com/stretchr/testify/assert" 27 "go.opentelemetry.io/collector/pdata/pcommon" 28 "go.opentelemetry.io/collector/pdata/plog" 29 "go.opentelemetry.io/collector/pdata/plog/plogotlp" 30 "go.opentelemetry.io/collector/pdata/pmetric" 31 "go.opentelemetry.io/collector/pdata/ptrace" 32 33 "github.com/alibaba/ilogtail/pkg/config" 34 "github.com/alibaba/ilogtail/pkg/models" 35 "github.com/alibaba/ilogtail/pkg/protocol/decoder/common" 36 "github.com/alibaba/ilogtail/pkg/protocol/otlp" 37 ) 38 39 var textFormat = `{"resourceLogs":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"OtlpExporterExample"}},{"key":"telemetry.sdk.language","value":{"stringValue":"java"}},{"key":"telemetry.sdk.name","value":{"stringValue":"opentelemetry"}},{"key":"telemetry.sdk.version","value":{"stringValue":"1.18.0"}}]},"scopeLogs":[{"scope":{"name":"io.opentelemetry.example"},"logRecords":[{"timeUnixNano":"1663904182348000000","severityNumber":9,"severityText":"INFO","body":{"stringValue":"log 
body1"},"attributes":[{"key":"k1","value":{"stringValue":"v1"}},{"key":"k2","value":{"stringValue":"v2"}}],"traceId":"","spanId":""},{"timeUnixNano":"1663904182348000000","severityNumber":9,"severityText":"INFO","body":{"stringValue":"log body2"},"attributes":[{"key":"k1","value":{"stringValue":"v1"}},{"key":"k2","value":{"stringValue":"v2"}}],"traceId":"","spanId":""}]}]}]}` 40 41 func TestNormal(t *testing.T) { 42 httpReq, _ := http.NewRequest("Post", "", nil) 43 httpReq.Header.Set("Content-Type", jsonContentType) 44 decoder := &Decoder{Format: common.ProtocolOTLPLogV1} 45 46 logs, err := decoder.Decode([]byte(textFormat), httpReq, nil) 47 assert.Nil(t, err) 48 assert.Equal(t, len(logs), 2) 49 log := logs[1] 50 assert.Equal(t, int(log.Time), 1663904182) 51 for _, cont := range log.Contents { 52 if cont.Key == "attributes" { 53 assert.NotEmpty(t, cont.Value) 54 } else if cont.Key == "resources" { 55 assert.NotEmpty(t, cont.Value) 56 } 57 } 58 data, _ := json.Marshal(logs) 59 fmt.Printf("%s\n", string(data)) 60 data = []byte("") 61 fmt.Printf("empty = [%s]\n", string(data)) 62 } 63 64 func TestConvertOtlpLogV1(t *testing.T) { 65 logs := plog.NewLogs() 66 resourceLogs := logs.ResourceLogs() 67 68 resourceLog := resourceLogs.AppendEmpty() 69 resource := resourceLog.Resource() 70 resource.Attributes().PutStr("serviceName", "test-service") 71 72 scopeLog := resourceLog.ScopeLogs().AppendEmpty() 73 scopeLog.Scope().Attributes().PutStr("scope_key1", "scope_value1") 74 75 now := pcommon.NewTimestampFromTime(time.Now()) 76 77 logRecord := scopeLog.LogRecords().AppendEmpty() 78 logRecord.SetTimestamp(now) 79 logRecord.Body().SetStr("test-message") 80 logRecord.Attributes().PutInt("attr1", 123) 81 logRecord.Attributes().PutBool("attr2", true) 82 83 otlpLogReq := plogotlp.NewExportRequestFromLogs(logs) 84 85 result, err := ConvertOtlpLogV1(otlpLogReq.Logs()) 86 if err != nil { 87 t.Errorf("Error: %v", err) 88 return 89 } 90 91 // Check if the result is not nil 92 
assert.NotNil(t, result) 93 94 // Check if the number of logs is correct 95 assert.Equal(t, 1, len(result)) 96 97 // Check if the contents of the log are correct 98 log1 := result[0] 99 assert.Equal(t, uint32(now/1e9), log1.Time) 100 101 assert.Equal(t, "time_unix_nano", log1.Contents[0].Key) 102 assert.Equal(t, strconv.FormatInt(int64(now), 10), log1.Contents[0].Value) 103 104 assert.Equal(t, "severity_number", log1.Contents[1].Key) 105 assert.Equal(t, "0", log1.Contents[1].Value) 106 107 assert.Equal(t, "severity_text", log1.Contents[2].Key) 108 assert.Equal(t, "", log1.Contents[2].Value) 109 110 assert.Equal(t, "content", log1.Contents[3].Key) 111 assert.Equal(t, "test-message", log1.Contents[3].Value) 112 113 // Check if the attribute is correct 114 assert.Equal(t, "attributes", log1.Contents[4].Key) 115 116 expectedAttr := make(map[string]interface{}) 117 expectedAttr["attr1"] = float64(123) 118 expectedAttr["attr2"] = true 119 120 expectedAttrBytes, err := json.Marshal(expectedAttr) 121 if err != nil { 122 t.Errorf("Error: %v", err) 123 return 124 } 125 assert.Equal(t, string(expectedAttrBytes), log1.Contents[4].Value) 126 127 // Check if the resources is correct 128 assert.Equal(t, "resources", log1.Contents[5].Key) 129 130 expectedRes := make(map[string]interface{}) 131 expectedRes["serviceName"] = "test-service" 132 133 expectedResBytes, err := json.Marshal(expectedRes) 134 if err != nil { 135 t.Errorf("Error: %v", err) 136 return 137 } 138 assert.Equal(t, string(expectedResBytes), log1.Contents[5].Value) 139 } 140 141 func TestDecoder_Decode_Logs(t *testing.T) { 142 // complcated case 143 encoder := &plog.JSONMarshaler{} 144 jsonBuf, err := encoder.MarshalLogs(logsOTLP) 145 assert.NoError(t, err) 146 httpReq, _ := http.NewRequest("Post", "", nil) 147 httpReq.Header.Set("Content-Type", jsonContentType) 148 decoder := &Decoder{Format: common.ProtocolOTLPLogV1} 149 150 logs, err := decoder.Decode(jsonBuf, httpReq, nil) 151 assert.NoError(t, err) 152 
assert.Equal(t, 1, len(logs)) 153 154 for _, log := range logs { 155 assert.Equal(t, 6, len(log.Contents)) 156 assert.Equal(t, "time_unix_nano", log.Contents[0].Key) 157 158 assert.Equal(t, "severity_number", log.Contents[1].Key) 159 assert.Equal(t, "17", log.Contents[1].Value) 160 161 assert.Equal(t, "severity_text", log.Contents[2].Key) 162 assert.Equal(t, "Error", log.Contents[2].Value) 163 164 assert.Equal(t, "content", log.Contents[3].Key) 165 assert.Equal(t, "hello world", log.Contents[3].Value) 166 167 assert.Equal(t, "attributes", log.Contents[4].Key) 168 expectedAttr := make(map[string]interface{}) 169 expectedAttr["sdkVersion"] = "1.0.1" 170 expectedAttrBytes, err := json.Marshal(expectedAttr) 171 if err != nil { 172 t.Errorf("Error: %v", err) 173 return 174 } 175 assert.Equal(t, string(expectedAttrBytes), log.Contents[4].Value) 176 177 assert.Equal(t, "resources", log.Contents[5].Key) 178 expectedRes := make(map[string]interface{}) 179 expectedRes["host.name"] = "testHost" 180 expectedResBytes, err := json.Marshal(expectedRes) 181 if err != nil { 182 t.Errorf("Error: %v", err) 183 return 184 } 185 assert.Equal(t, string(expectedResBytes), log.Contents[5].Value) 186 } 187 } 188 189 func TestDecoder_Decode_MetricsUntyped(t *testing.T) { 190 var metricsJSON = `{"resourceMetrics":[{"resource":{"attributes":[{"key":"host.name","value":{"stringValue":"testHost"}}]},"scopeMetrics":[{"scope":{"name":"name","version":"version"},"metrics":[{"name":"testMetric"}]}]}]}` 191 httpReq, _ := http.NewRequest("Post", "", nil) 192 httpReq.Header.Set("Content-Type", jsonContentType) 193 decoder := &Decoder{Format: common.ProtocolOTLPMetricV1} 194 195 logs, err := decoder.Decode([]byte(metricsJSON), httpReq, nil) 196 assert.Nil(t, err) 197 assert.Equal(t, 1, len(logs)) 198 199 for _, log := range logs { 200 assert.Equal(t, "__name__", log.Contents[0].Key) 201 assert.Equal(t, "testMetric", log.Contents[0].Value) 202 assert.Equal(t, "__labels__", log.Contents[1].Key) 203 
assert.Equal(t, "Empty", log.Contents[1].Value) 204 assert.Equal(t, "__time_nano__", log.Contents[2].Key) 205 assert.Equal(t, "__value__", log.Contents[3].Key) 206 assert.Equal(t, "", log.Contents[3].Value) 207 } 208 } 209 210 func TestDecoder_Decode_MetricsAll(t *testing.T) { 211 config.LoongcollectorGlobalConfig.EnableSlsMetricsFormat = true 212 type args struct { 213 md func() pmetric.Metrics 214 } 215 216 tests := []struct { 217 name string 218 args args 219 }{ 220 { 221 name: "sum", 222 args: args{ 223 md: metricsSumOTLPFull, 224 }}, 225 { 226 name: "gauge", 227 args: args{ 228 md: metricsGaugeOTLPFull, 229 }, 230 }, 231 { 232 name: "Histogram", 233 args: args{ 234 md: metricsHistogramOTLPFull, 235 }, 236 }, 237 { 238 name: "ExponentialHistogram", 239 args: args{ 240 md: metricsExponentialHistogramOTLPFull, 241 }, 242 }, 243 { 244 name: "Summary", 245 args: args{ 246 md: metricsSummaryOTLPFull, 247 }, 248 }, 249 } 250 251 encoder := &pmetric.JSONMarshaler{} 252 httpReq, _ := http.NewRequest("Post", "", nil) 253 httpReq.Header.Set("Content-Type", jsonContentType) 254 decoder := &Decoder{Format: common.ProtocolOTLPMetricV1} 255 256 for _, tt := range tests { 257 t.Run(tt.name, func(t *testing.T) { 258 m := tt.args.md() 259 jsonBuf, err := encoder.MarshalMetrics(m) 260 assert.NoError(t, err) 261 logs, err := decoder.Decode(jsonBuf, httpReq, nil) 262 assert.NoError(t, err) 263 264 expectedLogsLen := func() int { 265 count := 0 266 for i := 0; i < m.ResourceMetrics().Len(); i++ { 267 resourceMetric := m.ResourceMetrics().At(i) 268 for j := 0; j < resourceMetric.ScopeMetrics().Len(); j++ { 269 metrics := resourceMetric.ScopeMetrics().At(j).Metrics() 270 for k := 0; k < metrics.Len(); k++ { 271 metric := metrics.At(k) 272 switch metric.Type() { 273 case pmetric.MetricTypeGauge: 274 count += metric.Gauge().DataPoints().Len() 275 for i := 0; i < metric.Gauge().DataPoints().Len(); i++ { 276 count += metric.Gauge().DataPoints().At(i).Exemplars().Len() 277 } 278 case 
pmetric.MetricTypeSum: 279 count += metric.Sum().DataPoints().Len() 280 for i := 0; i < metric.Sum().DataPoints().Len(); i++ { 281 count += metric.Sum().DataPoints().At(i).Exemplars().Len() 282 } 283 case pmetric.MetricTypeSummary: 284 for l := 0; l < metric.Summary().DataPoints().Len(); l++ { 285 dataPoint := metric.Summary().DataPoints().At(l) 286 count += 2 287 count += dataPoint.QuantileValues().Len() 288 } 289 case pmetric.MetricTypeHistogram: 290 for l := 0; l < metric.Histogram().DataPoints().Len(); l++ { 291 dataPoint := metric.Histogram().DataPoints().At(l) 292 count += dataPoint.Exemplars().Len() 293 if dataPoint.HasSum() { 294 count++ 295 } 296 if dataPoint.HasMin() { 297 count++ 298 } 299 if dataPoint.HasMax() { 300 count++ 301 } 302 count += dataPoint.BucketCounts().Len() + 1 303 } 304 case pmetric.MetricTypeExponentialHistogram: 305 for l := 0; l < metric.ExponentialHistogram().DataPoints().Len(); l++ { 306 dataPoint := metric.ExponentialHistogram().DataPoints().At(l) 307 count += dataPoint.Exemplars().Len() 308 if dataPoint.HasSum() { 309 count++ 310 } 311 if dataPoint.HasMin() { 312 count++ 313 } 314 if dataPoint.HasMax() { 315 count++ 316 } 317 count += dataPoint.Negative().BucketCounts().Len() + dataPoint.Positive().BucketCounts().Len() + 4 318 } 319 } 320 } 321 } 322 } 323 return count 324 }() 325 assert.Equal(t, expectedLogsLen, len(logs)) 326 327 metric := m.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0) 328 switch metric.Type() { 329 case pmetric.MetricTypeGauge: 330 assert.Equal(t, "__name__", logs[0].Contents[0].Key) 331 assert.Equal(t, "test_gauge_exemplars", logs[0].Contents[0].Value) 332 assert.Equal(t, "__labels__", logs[0].Contents[2].Key) 333 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|service_name#$#testService|service_name#$#testService|spanId#$#1112131415161718|string#$#value|traceId#$#0102030405060708090a0b0c0d0e0f10", logs[0].Contents[2].Value) 334 assert.Equal(t, 
"__time_nano__", logs[0].Contents[1].Key) 335 assert.Equal(t, "__value__", logs[0].Contents[3].Key) 336 assert.Equal(t, "99.3", logs[0].Contents[3].Value) 337 338 assert.Equal(t, "__name__", logs[1].Contents[0].Key) 339 assert.Equal(t, "test_gauge", logs[1].Contents[0].Value) 340 assert.Equal(t, "__labels__", logs[1].Contents[2].Key) 341 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|service_name#$#testService|string#$#value", logs[1].Contents[2].Value) 342 assert.Equal(t, "__time_nano__", logs[1].Contents[1].Key) 343 assert.Equal(t, "__value__", logs[1].Contents[3].Key) 344 assert.Equal(t, "10.2", logs[1].Contents[3].Value) 345 case pmetric.MetricTypeSum: 346 assert.Equal(t, "__name__", logs[0].Contents[0].Key) 347 assert.Equal(t, "test_sum_exemplars", logs[0].Contents[0].Value) 348 assert.Equal(t, "__labels__", logs[0].Contents[2].Key) 349 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_ismonotonic#$#true|service_name#$#testService|service_name#$#testService|spanId#$#1112131415161718|string#$#value|traceId#$#0102030405060708090a0b0c0d0e0f10", logs[0].Contents[2].Value) 350 assert.Equal(t, "__time_nano__", logs[0].Contents[1].Key) 351 assert.Equal(t, "__value__", logs[0].Contents[3].Key) 352 assert.Equal(t, "99.3", logs[0].Contents[3].Value) 353 354 assert.Equal(t, "__name__", logs[1].Contents[0].Key) 355 assert.Equal(t, "test_sum", logs[1].Contents[0].Value) 356 assert.Equal(t, "__labels__", logs[1].Contents[2].Key) 357 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_ismonotonic#$#true|service_name#$#testService|string#$#value", logs[1].Contents[2].Value) 358 assert.Equal(t, "__time_nano__", logs[1].Contents[1].Key) 359 assert.Equal(t, "__value__", logs[1].Contents[3].Key) 360 assert.Equal(t, "100", logs[1].Contents[3].Value) 361 362 
assert.Equal(t, "__name__", logs[2].Contents[0].Key) 363 assert.Equal(t, "test_sum", logs[2].Contents[0].Value) 364 assert.Equal(t, "__labels__", logs[2].Contents[2].Key) 365 assert.Equal(t, "bool#$#false|bytes#$#YmFy|double#$#2.2|host_name#$#testHost|int#$#2|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_ismonotonic#$#true|service_name#$#testService|string#$#value2", logs[2].Contents[2].Value) 366 assert.Equal(t, "__time_nano__", logs[2].Contents[1].Key) 367 assert.Equal(t, "__value__", logs[2].Contents[3].Key) 368 assert.Equal(t, "50", logs[2].Contents[3].Value) 369 case pmetric.MetricTypeSummary: 370 assert.Equal(t, "__name__", logs[0].Contents[0].Key) 371 assert.Equal(t, "test_summary_sum", logs[0].Contents[0].Value) 372 assert.Equal(t, "__labels__", logs[0].Contents[2].Key) 373 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|service_name#$#testService|string#$#value", logs[0].Contents[2].Value) 374 assert.Equal(t, "__time_nano__", logs[0].Contents[1].Key) 375 assert.Equal(t, "__value__", logs[0].Contents[3].Key) 376 assert.Equal(t, "1000", logs[0].Contents[3].Value) 377 378 assert.Equal(t, "__name__", logs[1].Contents[0].Key) 379 assert.Equal(t, "test_summary_count", logs[1].Contents[0].Value) 380 assert.Equal(t, "__labels__", logs[1].Contents[2].Key) 381 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|service_name#$#testService|string#$#value", logs[1].Contents[2].Value) 382 assert.Equal(t, "__time_nano__", logs[1].Contents[1].Key) 383 assert.Equal(t, "__value__", logs[1].Contents[3].Key) 384 assert.Equal(t, "100", logs[1].Contents[3].Value) 385 386 assert.Equal(t, "__name__", logs[2].Contents[0].Key) 387 assert.Equal(t, "test_summary", logs[2].Contents[0].Value) 388 assert.Equal(t, "__labels__", logs[2].Contents[2].Key) 389 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|quantile#$#0.5|service_name#$#testService|string#$#value", 
logs[2].Contents[2].Value) 390 assert.Equal(t, "__time_nano__", logs[2].Contents[1].Key) 391 assert.Equal(t, "__value__", logs[2].Contents[3].Key) 392 assert.Equal(t, "1.2", logs[2].Contents[3].Value) 393 case pmetric.MetricTypeHistogram: 394 assert.Equal(t, "__name__", logs[0].Contents[0].Key) 395 assert.Equal(t, "test_Histogram_sum", logs[0].Contents[0].Value) 396 assert.Equal(t, "__labels__", logs[0].Contents[2].Key) 397 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[0].Contents[2].Value) 398 assert.Equal(t, "__time_nano__", logs[0].Contents[1].Key) 399 assert.Equal(t, "__value__", logs[0].Contents[3].Key) 400 assert.Equal(t, fmt.Sprint(metric.Histogram().DataPoints().At(0).Sum()), logs[0].Contents[3].Value) 401 402 assert.Equal(t, "__name__", logs[1].Contents[0].Key) 403 assert.Equal(t, "test_Histogram_min", logs[1].Contents[0].Value) 404 assert.Equal(t, "__labels__", logs[1].Contents[2].Key) 405 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[1].Contents[2].Value) 406 assert.Equal(t, "__time_nano__", logs[1].Contents[1].Key) 407 assert.Equal(t, "__value__", logs[1].Contents[3].Key) 408 assert.Equal(t, fmt.Sprint(metric.Histogram().DataPoints().At(0).Min()), logs[1].Contents[3].Value) 409 410 assert.Equal(t, "__name__", logs[2].Contents[0].Key) 411 assert.Equal(t, "test_Histogram_max", logs[2].Contents[0].Value) 412 assert.Equal(t, "__labels__", logs[2].Contents[2].Key) 413 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[2].Contents[2].Value) 414 
assert.Equal(t, "__time_nano__", logs[2].Contents[1].Key) 415 assert.Equal(t, "__value__", logs[2].Contents[3].Key) 416 assert.Equal(t, fmt.Sprint(metric.Histogram().DataPoints().At(0).Max()), logs[2].Contents[3].Value) 417 418 assert.Equal(t, "__name__", logs[3].Contents[0].Key) 419 assert.Equal(t, "test_Histogram_count", logs[3].Contents[0].Value) 420 assert.Equal(t, "__labels__", logs[3].Contents[2].Key) 421 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[3].Contents[2].Value) 422 assert.Equal(t, "__time_nano__", logs[3].Contents[1].Key) 423 assert.Equal(t, "__value__", logs[3].Contents[3].Key) 424 assert.Equal(t, fmt.Sprint(metric.Histogram().DataPoints().At(0).Count()), logs[3].Contents[3].Value) 425 426 assert.Equal(t, "__name__", logs[4].Contents[0].Key) 427 assert.Equal(t, "test_Histogram_exemplars", logs[4].Contents[0].Value) 428 assert.Equal(t, "__labels__", logs[4].Contents[2].Key) 429 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|service_name#$#testService|spanId#$#1112131415161718|string#$#value|traceId#$#0102030405060708090a0b0c0d0e0f10", logs[4].Contents[2].Value) 430 assert.Equal(t, "__time_nano__", logs[4].Contents[1].Key) 431 assert.Equal(t, "__value__", logs[4].Contents[3].Key) 432 assert.Equal(t, "99.3", logs[4].Contents[3].Value) 433 434 assert.Equal(t, "__name__", logs[5].Contents[0].Key) 435 assert.Equal(t, "test_Histogram_bucket", logs[5].Contents[0].Value) 436 assert.Equal(t, "__labels__", logs[5].Contents[2].Key) 437 assert.Equal(t, 
"bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|le#$#10|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[5].Contents[2].Value) 438 assert.Equal(t, "__time_nano__", logs[5].Contents[1].Key) 439 assert.Equal(t, "__value__", logs[5].Contents[3].Key) 440 assert.Equal(t, "1", logs[5].Contents[3].Value) 441 442 assert.Equal(t, "__name__", logs[6].Contents[0].Key) 443 assert.Equal(t, "test_Histogram_bucket", logs[6].Contents[0].Value) 444 assert.Equal(t, "__labels__", logs[6].Contents[2].Key) 445 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|le#$#100|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[6].Contents[2].Value) 446 assert.Equal(t, "__time_nano__", logs[6].Contents[1].Key) 447 assert.Equal(t, "__value__", logs[6].Contents[3].Key) 448 assert.Equal(t, "2", logs[6].Contents[3].Value) 449 450 assert.Equal(t, "__name__", logs[7].Contents[0].Key) 451 assert.Equal(t, "test_Histogram_bucket", logs[7].Contents[0].Value) 452 assert.Equal(t, "__labels__", logs[7].Contents[2].Key) 453 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|le#$#+Inf|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#Histogram|service_name#$#testService|string#$#value", logs[7].Contents[2].Value) 454 assert.Equal(t, "__time_nano__", logs[7].Contents[1].Key) 455 assert.Equal(t, "__value__", logs[7].Contents[3].Key) 456 assert.Equal(t, "4", logs[7].Contents[3].Value) 457 case pmetric.MetricTypeExponentialHistogram: 458 assert.Equal(t, "__name__", logs[0].Contents[0].Key) 459 assert.Equal(t, "test_ExponentialHistogram_sum", logs[0].Contents[0].Value) 460 assert.Equal(t, "__labels__", logs[0].Contents[2].Key) 461 assert.Equal(t, 
"bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#ExponentialHistogram|service_name#$#testService|string#$#value", logs[0].Contents[2].Value) 462 assert.Equal(t, "__time_nano__", logs[0].Contents[1].Key) 463 assert.Equal(t, "__value__", logs[0].Contents[3].Key) 464 assert.Equal(t, fmt.Sprint(metric.ExponentialHistogram().DataPoints().At(0).Sum()), logs[0].Contents[3].Value) 465 466 assert.Equal(t, "__name__", logs[1].Contents[0].Key) 467 assert.Equal(t, "test_ExponentialHistogram_min", logs[1].Contents[0].Value) 468 assert.Equal(t, "__labels__", logs[1].Contents[2].Key) 469 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#ExponentialHistogram|service_name#$#testService|string#$#value", logs[1].Contents[2].Value) 470 assert.Equal(t, "__time_nano__", logs[1].Contents[1].Key) 471 assert.Equal(t, "__value__", logs[1].Contents[3].Key) 472 assert.Equal(t, fmt.Sprint(metric.ExponentialHistogram().DataPoints().At(0).Min()), logs[1].Contents[3].Value) 473 474 assert.Equal(t, "__name__", logs[2].Contents[0].Key) 475 assert.Equal(t, "test_ExponentialHistogram_max", logs[2].Contents[0].Value) 476 assert.Equal(t, "__labels__", logs[2].Contents[2].Key) 477 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#ExponentialHistogram|service_name#$#testService|string#$#value", logs[2].Contents[2].Value) 478 assert.Equal(t, "__time_nano__", logs[2].Contents[1].Key) 479 assert.Equal(t, "__value__", logs[2].Contents[3].Key) 480 assert.Equal(t, fmt.Sprint(metric.ExponentialHistogram().DataPoints().At(0).Max()), logs[2].Contents[3].Value) 481 482 assert.Equal(t, "__name__", logs[3].Contents[0].Key) 483 assert.Equal(t, "test_ExponentialHistogram_count", 
logs[3].Contents[0].Value) 484 assert.Equal(t, "__labels__", logs[3].Contents[2].Key) 485 assert.Equal(t, "bool#$#true|bytes#$#Zm9v|double#$#1.1|host_name#$#testHost|int#$#1|otlp_metric_aggregation_temporality#$#Cumulative|otlp_metric_histogram_type#$#ExponentialHistogram|service_name#$#testService|string#$#value", logs[3].Contents[2].Value) 486 assert.Equal(t, "__time_nano__", logs[3].Contents[1].Key) 487 assert.Equal(t, "__value__", logs[3].Contents[3].Key) 488 assert.Equal(t, fmt.Sprint(metric.ExponentialHistogram().DataPoints().At(0).Count()), logs[3].Contents[3].Value) 489 } 490 }) 491 } 492 } 493 494 func TestConvertOtlpLogsToGroupEvents(t *testing.T) { 495 plogs := plog.NewLogs() 496 rsLogs := plogs.ResourceLogs().AppendEmpty() 497 rsLogs.Resource().Attributes().PutStr("meta_attr1", "attr_value1") 498 rsLogs.Resource().Attributes().PutStr("meta_attr2", "attr_value2") 499 scopeLog := rsLogs.ScopeLogs().AppendEmpty() 500 scopeLog.Scope().Attributes().PutStr("scope_key1", "scope_value1") 501 scopeLog.Scope().Attributes().PutStr(otlp.TagKeyScopeVersion, "") // skip 502 scopeLog.Scope().Attributes().PutStr(otlp.TagKeyScopeName, "scope_name") // keep 503 504 logRecord := scopeLog.LogRecords().AppendEmpty() 505 logRecord.Body().SetStr("some log message") 506 logRecord.SetSeverityNumber(plog.SeverityNumberInfo) 507 logRecord.SetTimestamp(1234567890) 508 logRecord.Attributes().PutStr("process", "process_name") 509 logRecord.SetTraceID(pcommon.TraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16})) 510 logRecord.SetSpanID(pcommon.SpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})) 511 512 groupEventsSlice, err := ConvertOtlpLogsToGroupEvents(plogs) 513 514 // Check the results 515 assert.Nil(t, err) 516 assert.NotNil(t, groupEventsSlice) 517 assert.Equal(t, 1, len(groupEventsSlice)) 518 group := groupEventsSlice[0].Group 519 assert.NotNil(t, group) 520 assert.Equal(t, 2, group.Metadata.Len()) 521 assert.Equal(t, "attr_value1", group.Metadata.Get("meta_attr1")) 
522 assert.Equal(t, "attr_value2", group.Metadata.Get("meta_attr2")) 523 524 assert.Equal(t, 1+2, group.Tags.Len()) 525 assert.Equal(t, "scope_value1", group.Tags.Get("scope_key1")) 526 527 events := groupEventsSlice[0].Events 528 assert.Equal(t, 1, len(events)) 529 actualLog := events[0].(*models.Log) 530 assert.Equal(t, models.EventTypeLogging, events[0].GetType()) 531 assert.Equal(t, []byte("some log message"), actualLog.GetBody()) 532 assert.Equal(t, "Info", actualLog.Level) 533 assert.Equal(t, "0102030405060708", actualLog.SpanID) 534 assert.Equal(t, "0102030405060708090a0b0c0d0e0f10", actualLog.TraceID) 535 assert.Equal(t, uint64(1234567890), actualLog.Timestamp) 536 assert.Equal(t, "process_name", actualLog.Tags.Get("process")) 537 } 538 539 func TestDecoder_DecodeV2_Logs(t *testing.T) { 540 // complcated case 541 encoder := &plog.JSONMarshaler{} 542 jsonBuf, err := encoder.MarshalLogs(logsOTLP) 543 assert.NoError(t, err) 544 httpReq, _ := http.NewRequest("Post", "", nil) 545 httpReq.Header.Set("Content-Type", jsonContentType) 546 decoder := &Decoder{Format: common.ProtocolOTLPLogV1} 547 548 pipelineGrouptEventsSlice, err := decoder.DecodeV2(jsonBuf, httpReq) 549 assert.NoError(t, err) 550 assert.Equal(t, 1, len(pipelineGrouptEventsSlice)) 551 otlpResLogs := logsOTLP.ResourceLogs().At(0) 552 553 for i, groupEvents := range pipelineGrouptEventsSlice { 554 resource := groupEvents.Group.Metadata 555 assert.Equal(t, "testHost", resource.Get("host.name")) 556 557 scopeAttributes := groupEvents.Group.Tags 558 assert.False(t, scopeAttributes.Contains(otlp.TagKeyScopeVersion)) 559 assert.Equal(t, "name", scopeAttributes.Get(otlp.TagKeyScopeName)) 560 561 otlpLogs := otlpResLogs.ScopeLogs().At(i).LogRecords() 562 assert.Equal(t, 1, len(groupEvents.Events)) 563 564 for j, event := range groupEvents.Events { 565 otlpLog := otlpLogs.At(j) 566 name := event.GetName() 567 assert.Equal(t, logEventName, name) 568 569 eventType := event.GetType() 570 assert.Equal(t, 
models.EventTypeLogging, eventType) 571 log, ok := event.(*models.Log) 572 assert.True(t, ok) 573 assert.Equal(t, otlpLog.TraceID().String(), log.TraceID) 574 assert.Equal(t, otlpLog.SpanID().String(), log.SpanID) 575 576 otlpLog.Attributes().Range( 577 func(k string, v pcommon.Value) bool { 578 assert.True(t, log.Tags.Contains(k)) 579 return true 580 }, 581 ) 582 } 583 } 584 } 585 586 func TestDecoder_DecodeV2_MetricsUntyped(t *testing.T) { 587 var metricsJSON = `{"resourceMetrics":[{"resource":{"attributes":[{"key":"host.name","value":{"stringValue":"testHost"}}]},"scopeMetrics":[{"scope":{"name":"name","version":"version"},"metrics":[{"name":"testMetric"}]}]}]}` 588 httpReq, _ := http.NewRequest("Post", "", nil) 589 httpReq.Header.Set("Content-Type", jsonContentType) 590 decoder := &Decoder{Format: common.ProtocolOTLPMetricV1} 591 592 pipelineGrouptEventsSlice, err := decoder.DecodeV2([]byte(metricsJSON), httpReq) 593 assert.Nil(t, err) 594 assert.Equal(t, 1, len(pipelineGrouptEventsSlice)) 595 596 for _, groupEvents := range pipelineGrouptEventsSlice { 597 assert.Equal(t, 1, len(groupEvents.Events)) 598 for _, event := range groupEvents.Events { 599 name := event.GetName() 600 assert.Equal(t, "testMetric", name) 601 eventType := event.GetType() 602 assert.Equal(t, models.EventTypeMetric, eventType) 603 metric, ok := event.(*models.Metric) 604 assert.True(t, ok) 605 assert.Equal(t, models.MetricTypeUntyped, metric.MetricType) 606 } 607 } 608 } 609 610 func TestDecoder_DecodeV2_MetricsAll(t *testing.T) { 611 type args struct { 612 md func() pmetric.Metrics 613 } 614 615 tests := []struct { 616 name string 617 args args 618 }{ 619 { 620 name: "sum", 621 args: args{ 622 md: metricsSumOTLPFull, 623 }}, 624 { 625 name: "gauge", 626 args: args{ 627 md: metricsGaugeOTLPFull, 628 }, 629 }, 630 { 631 name: "Histogram", 632 args: args{ 633 md: metricsHistogramOTLPFull, 634 }, 635 }, 636 { 637 name: "ExponentialHistogram", 638 args: args{ 639 md: 
metricsExponentialHistogramOTLPFull, 640 }, 641 }, 642 { 643 name: "Summary", 644 args: args{ 645 md: metricsSummaryOTLPFull, 646 }, 647 }, 648 } 649 650 encoder := &pmetric.JSONMarshaler{} 651 httpReq, _ := http.NewRequest("Post", "", nil) 652 httpReq.Header.Set("Content-Type", jsonContentType) 653 decoder := &Decoder{Format: common.ProtocolOTLPMetricV1} 654 655 for _, tt := range tests { 656 t.Run(tt.name, func(t *testing.T) { 657 m := tt.args.md() 658 jsonBuf, err := encoder.MarshalMetrics(m) 659 assert.NoError(t, err) 660 pipelineGrouptEventsSlice, err := decoder.DecodeV2(jsonBuf, httpReq) 661 assert.NoError(t, err) 662 663 expectedGroupEventsLen := func() int { 664 count := 0 665 for i := 0; i < m.ResourceMetrics().Len(); i++ { 666 resourceMetric := m.ResourceMetrics().At(i) 667 count += resourceMetric.ScopeMetrics().Len() 668 } 669 return count 670 }() 671 assert.Equal(t, expectedGroupEventsLen, len(pipelineGrouptEventsSlice)) 672 673 groupindex, eventIndex := 0, 0 674 for i := 0; i < m.ResourceMetrics().Len(); i++ { 675 resourceMetric := m.ResourceMetrics().At(i) 676 for j := 0; j < resourceMetric.ScopeMetrics().Len(); j++ { 677 scopeMetric := resourceMetric.ScopeMetrics().At(j) 678 679 for m := 0; m < scopeMetric.Metrics().Len(); m++ { 680 otMetric := scopeMetric.Metrics().At(m) 681 otUnit := otMetric.Unit() 682 otDescription := otMetric.Description() 683 684 switch otMetric.Type() { 685 case pmetric.MetricTypeGauge: 686 otGauge := otMetric.Gauge() 687 otDatapoints := otGauge.DataPoints() 688 for l := 0; l < otDatapoints.Len(); l++ { 689 datapoint := otDatapoints.At(l) 690 event := pipelineGrouptEventsSlice[groupindex].Events[eventIndex] 691 // check tags 692 datapoint.Attributes().Range( 693 func(k string, v pcommon.Value) bool { 694 assert.Equal(t, v.AsString(), event.GetTags().Get(k)) 695 return true 696 }) 697 698 metric, ok := event.(*models.Metric) 699 assert.True(t, ok) 700 assert.Equal(t, models.MetricTypeGauge, metric.MetricType) 701 
assert.Equal(t, otUnit, metric.Unit) 702 assert.Equal(t, otDescription, metric.Description) 703 704 assert.Equal(t, otMetric.Name(), event.GetName()) 705 assert.Equal(t, uint64(datapoint.Timestamp()), event.GetTimestamp()) 706 otValue := float64(datapoint.IntValue()) 707 if datapoint.ValueType() == pmetric.NumberDataPointValueTypeDouble { 708 otValue = datapoint.DoubleValue() 709 } 710 assert.Equal(t, otValue, metric.Value.GetSingleValue()) 711 712 eventIndex++ 713 } 714 case pmetric.MetricTypeSum: 715 otSum := otMetric.Sum() 716 isMonotonic := strconv.FormatBool(otSum.IsMonotonic()) 717 aggregationTemporality := otSum.AggregationTemporality() 718 otDatapoints := otSum.DataPoints() 719 720 for l := 0; l < otDatapoints.Len(); l++ { 721 datapoint := otDatapoints.At(l) 722 event := pipelineGrouptEventsSlice[groupindex].Events[eventIndex] 723 724 // check tags 725 datapoint.Attributes().Range( 726 func(k string, v pcommon.Value) bool { 727 assert.Equal(t, v.AsString(), event.GetTags().Get(k)) 728 return true 729 }) 730 assert.Equal(t, aggregationTemporality.String(), event.GetTags().Get(otlp.TagKeyMetricAggregationTemporality)) 731 assert.Equal(t, isMonotonic, event.GetTags().Get(otlp.TagKeyMetricIsMonotonic)) 732 733 metric, ok := event.(*models.Metric) 734 assert.True(t, ok) 735 if aggregationTemporality == pmetric.AggregationTemporalityCumulative { 736 assert.Equal(t, models.MetricTypeCounter, metric.MetricType) 737 } else { 738 assert.Equal(t, models.MetricTypeRateCounter, metric.MetricType) 739 } 740 741 assert.Equal(t, otUnit, metric.Unit) 742 assert.Equal(t, otDescription, metric.Description) 743 744 // check values 745 otValue := float64(datapoint.IntValue()) 746 if datapoint.ValueType() == pmetric.NumberDataPointValueTypeDouble { 747 otValue = datapoint.DoubleValue() 748 } 749 assert.Equal(t, otValue, metric.Value.GetSingleValue()) 750 751 assert.Equal(t, otMetric.Name(), event.GetName()) 752 assert.Equal(t, uint64(datapoint.Timestamp()), event.GetTimestamp()) 
753 eventIndex++ 754 } 755 case pmetric.MetricTypeSummary: 756 otSummary := otMetric.Summary() 757 otDatapoints := otSummary.DataPoints() 758 for l := 0; l < otDatapoints.Len(); l++ { 759 datapoint := otDatapoints.At(l) 760 event := pipelineGrouptEventsSlice[groupindex].Events[eventIndex] 761 762 // check tags 763 datapoint.Attributes().Range( 764 func(k string, v pcommon.Value) bool { 765 assert.Equal(t, v.AsString(), event.GetTags().Get(k)) 766 return true 767 }) 768 769 metric, ok := event.(*models.Metric) 770 assert.True(t, ok) 771 // check type 772 assert.Equal(t, models.MetricTypeSummary, metric.MetricType) 773 assert.Equal(t, otUnit, metric.Unit) 774 assert.Equal(t, otDescription, metric.Description) 775 assert.Equal(t, otMetric.Name(), event.GetName()) 776 assert.Equal(t, uint64(datapoint.Timestamp()), event.GetTimestamp()) 777 778 // check values 779 quantiles := datapoint.QuantileValues() 780 for n := 0; n < quantiles.Len(); n++ { 781 quantile := quantiles.At(n) 782 assert.Equal(t, quantile.Value(), metric.Value.GetMultiValues().Get(strconv.FormatFloat(quantile.Quantile(), 'f', -1, 64))) 783 } 784 785 assert.Equal(t, float64(datapoint.Count()), metric.Value.GetMultiValues().Get(otlp.FieldCount)) 786 assert.Equal(t, datapoint.Sum(), metric.Value.GetMultiValues().Get(otlp.FieldSum)) 787 eventIndex++ 788 } 789 case pmetric.MetricTypeHistogram: 790 otHistogram := otMetric.Histogram() 791 aggregationTemporality := otHistogram.AggregationTemporality() 792 otDatapoints := otHistogram.DataPoints() 793 794 for l := 0; l < otDatapoints.Len(); l++ { 795 datapoint := otDatapoints.At(l) 796 event := pipelineGrouptEventsSlice[groupindex].Events[eventIndex] 797 798 // check tags 799 datapoint.Attributes().Range( 800 func(k string, v pcommon.Value) bool { 801 assert.Equal(t, v.AsString(), event.GetTags().Get(k)) 802 return true 803 }) 804 assert.Equal(t, aggregationTemporality.String(), event.GetTags().Get(otlp.TagKeyMetricAggregationTemporality)) 805 assert.Equal(t, 
pmetric.MetricTypeHistogram.String(), event.GetTags().Get(otlp.TagKeyMetricHistogramType)) 806 807 metric, ok := event.(*models.Metric) 808 assert.True(t, ok) 809 assert.Equal(t, models.MetricTypeHistogram, metric.MetricType) 810 assert.Equal(t, otUnit, metric.Unit) 811 assert.Equal(t, otDescription, metric.Description) 812 813 assert.Equal(t, otMetric.Name(), event.GetName()) 814 assert.Equal(t, uint64(datapoint.Timestamp()), event.GetTimestamp()) 815 816 // check values 817 otBucketCounts := datapoint.BucketCounts() 818 otBucketBoundarys := datapoint.ExplicitBounds() 819 assert.Equal(t, otBucketCounts.Len(), otBucketBoundarys.Len()+1) 820 bucketBounds, bucketCounts := otlp.ComputeBuckets(metric.Value.GetMultiValues(), true) 821 otBucketCountFloat := make([]float64, 0, len(otBucketCounts.AsRaw())) 822 for _, v := range otBucketCounts.AsRaw() { 823 otBucketCountFloat = append(otBucketCountFloat, float64(v)) 824 } 825 assert.Equal(t, otBucketCountFloat, bucketCounts) 826 assert.Equal(t, otBucketBoundarys.AsRaw(), bucketBounds[1:]) 827 828 assert.Equal(t, float64(datapoint.Count()), metric.Value.GetMultiValues().Get(otlp.FieldCount)) 829 assert.Equal(t, datapoint.Sum(), metric.Value.GetMultiValues().Get(otlp.FieldSum)) 830 eventIndex++ 831 } 832 case pmetric.MetricTypeExponentialHistogram: 833 otExponentialHistogram := otMetric.ExponentialHistogram() 834 aggregationTemporality := otExponentialHistogram.AggregationTemporality() 835 otDatapoints := otExponentialHistogram.DataPoints() 836 837 for l := 0; l < otDatapoints.Len(); l++ { 838 datapoint := otDatapoints.At(l) 839 event := pipelineGrouptEventsSlice[groupindex].Events[eventIndex] 840 841 datapoint.Attributes().Range( 842 func(k string, v pcommon.Value) bool { 843 assert.Equal(t, v.AsString(), event.GetTags().Get(k)) 844 return true 845 }) 846 assert.Equal(t, aggregationTemporality.String(), event.GetTags().Get(otlp.TagKeyMetricAggregationTemporality)) 847 assert.Equal(t, 
pmetric.MetricTypeExponentialHistogram.String(), event.GetTags().Get(otlp.TagKeyMetricHistogramType)) 848 metric, ok := event.(*models.Metric) 849 assert.True(t, ok) 850 assert.Equal(t, otUnit, metric.Unit) 851 assert.Equal(t, otDescription, metric.Description) 852 assert.Equal(t, models.MetricTypeHistogram, metric.MetricType) 853 assert.Equal(t, otMetric.Name(), event.GetName()) 854 assert.Equal(t, uint64(datapoint.Timestamp()), event.GetTimestamp()) 855 856 // check values 857 scale := datapoint.Scale() 858 base := math.Pow(2, math.Pow(2, float64(-scale))) 859 assert.Equal(t, scale, int32(metric.Value.GetMultiValues().Get(otlp.FieldScale))) 860 861 positiveOffset := datapoint.Positive().Offset() 862 assert.Equal(t, positiveOffset, int32(metric.Value.GetMultiValues().Get(otlp.FieldPositiveOffset))) 863 otPositiveBucketCounts := datapoint.Positive().BucketCounts() 864 865 otPositiveBucketBounds, otPositiveBucketCountsFloat := make([]float64, 0), make([]float64, 0) 866 for i, v := range otPositiveBucketCounts.AsRaw() { 867 lowerBoundary := math.Pow(base, float64(int(positiveOffset)+i)) 868 otPositiveBucketBounds = append(otPositiveBucketBounds, lowerBoundary) 869 otPositiveBucketCountsFloat = append(otPositiveBucketCountsFloat, float64(v)) 870 } 871 postiveBucketBounds, positveBucketCounts := otlp.ComputeBuckets(metric.Value.GetMultiValues(), true) 872 873 assert.Equal(t, otPositiveBucketBounds, postiveBucketBounds) 874 assert.Equal(t, otPositiveBucketCountsFloat, positveBucketCounts) 875 876 negativeOffset := datapoint.Negative().Offset() 877 assert.Equal(t, negativeOffset, int32(metric.Value.GetMultiValues().Get(otlp.FieldNegativeOffset))) 878 otNegativeBucetCounts := datapoint.Negative().BucketCounts() 879 880 otNegativeBucketBounds, otNegativeBucketCountsFloat := make([]float64, 0), make([]float64, 0) 881 882 for i, v := range otNegativeBucetCounts.AsRaw() { 883 upperBoundary := -math.Pow(base, float64(int(negativeOffset)+i)) 884 otNegativeBucketBounds = 
									append(otNegativeBucketBounds, upperBoundary)
								otNegativeBucketCountsFloat = append(otNegativeBucketCountsFloat, float64(v))
							}

							// Compare the negative-direction buckets reconstructed from the
							// decoded multi-value metric against those computed directly from
							// the OTLP datapoint (false selects the negative side).
							netativeBucketBounds, netativeBucketCounts := otlp.ComputeBuckets(metric.Value.GetMultiValues(), false)
							assert.Equal(t, otNegativeBucketBounds, netativeBucketBounds)
							assert.Equal(t, otNegativeBucketCountsFloat, netativeBucketCounts)

							// Count, Sum, and ZeroCount travel as named multi-value fields.
							assert.Equal(t, float64(datapoint.Count()), metric.Value.GetMultiValues().Get(otlp.FieldCount))
							assert.Equal(t, datapoint.Sum(), metric.Value.GetMultiValues().Get(otlp.FieldSum))
							assert.Equal(t, float64(datapoint.ZeroCount()), metric.Value.GetMultiValues().Get(otlp.FieldZeroCount))
							eventIndex++
						}
					}

				}

				// Every event in the current group must have been visited exactly once.
				assert.Equal(t, eventIndex, len(pipelineGrouptEventsSlice[groupindex].Events))
				eventIndex = 0
				groupindex++
			}

		}

	})
}

}

// TestDecoder_DecodeV2_TracesEmpty verifies that a span carrying empty
// traceId/spanId in the OTLP JSON payload decodes into exactly one span
// event whose TraceID and SpanID stay empty, with resource attributes in
// the group metadata and scope attributes in the group tags.
func TestDecoder_DecodeV2_TracesEmpty(t *testing.T) {
	var tracesJSON = `{"resourceSpans":[{"resource":{"attributes":[{"key":"host.name","value":{"stringValue":"testHost"}}]},"scopeSpans":[{"scope":{"name":"name","version":"version"},"spans":[{"traceId":"","spanId":"","parentSpanId":"","name":"testSpan","status":{}}]}]}]}`
	httpReq, _ := http.NewRequest("Post", "", nil)
	httpReq.Header.Set("Content-Type", jsonContentType)
	decoder := &Decoder{Format: common.ProtocolOTLPTraceV1}

	pipelineGrouptEventsSlice, err := decoder.DecodeV2([]byte(tracesJSON), httpReq)
	assert.Nil(t, err)
	assert.Equal(t, 1, len(pipelineGrouptEventsSlice))

	for _, groupEvents := range pipelineGrouptEventsSlice {
		// Resource attributes land in the group metadata.
		resource := groupEvents.Group.Metadata
		assert.True(t, resource.Contains("host.name"))
		// Scope name/version land in the group tags.
		scopeAttributes := groupEvents.Group.Tags
		assert.True(t, scopeAttributes.Contains(otlp.TagKeyScopeVersion))

		assert.Equal(t, 1, len(groupEvents.Events))
		for _, event := range groupEvents.Events {
			name := event.GetName()
			assert.Equal(t, "testSpan", name)
			eventType := event.GetType()
			assert.Equal(t, models.EventTypeSpan, eventType)
			span, ok := event.(*models.Span)
			assert.True(t, ok)
			fmt.Println(span.TraceID, span.SpanID)
			// Empty IDs in the payload must remain empty after decoding.
			assert.Equal(t, "", span.TraceID)
			assert.Equal(t, "", span.SpanID)
		}
	}
}

// TestDecoder_DecodeV2_Traces round-trips the full tracesOTLPFull fixture
// through the OTLP JSON marshaler and DecodeV2, then verifies resource and
// scope metadata, span identity fields, and (below) span links and events
// against the original pdata representation.
func TestDecoder_DecodeV2_Traces(t *testing.T) {
	// complicated case
	encoder := &ptrace.JSONMarshaler{}
	jsonBuf, err := encoder.MarshalTraces(tracesOTLPFull)
	assert.NoError(t, err)
	httpReq, _ := http.NewRequest("Post", "", nil)
	httpReq.Header.Set("Content-Type", jsonContentType)
	decoder := &Decoder{Format: common.ProtocolOTLPTraceV1}

	pipelineGrouptEventsSlice, err := decoder.DecodeV2(jsonBuf, httpReq)
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pipelineGrouptEventsSlice))
	otlpScopeSpans := tracesOTLPFull.ResourceSpans().At(0)

	for i, groupEvents := range pipelineGrouptEventsSlice {
		// Resource attributes are carried in the group metadata.
		resource := groupEvents.Group.Metadata
		assert.Equal(t, "testHost", resource.Get("host.name"))
		assert.Equal(t, "testService", resource.Get("service.name"))

		// Scope name/version are carried in the group tags.
		scopeAttributes := groupEvents.Group.Tags
		assert.Equal(t, "scope version", scopeAttributes.Get(otlp.TagKeyScopeVersion))
		assert.Equal(t, "scope name", scopeAttributes.Get(otlp.TagKeyScopeName))

		otlpSpans := otlpScopeSpans.ScopeSpans().At(i).Spans()
		// The fixture contains two spans (testSpan and testSpan2).
		assert.Equal(t, 2, len(groupEvents.Events))

		for j, event := range groupEvents.Events {
			otlpSpan := otlpSpans.At(j)
			name := event.GetName()
			assert.Equal(t, otlpSpan.Name(), name)
			eventType := event.GetType()
			assert.Equal(t, models.EventTypeSpan, eventType)
			span, ok := event.(*models.Span)
			assert.True(t, ok)
			assert.Equal(t, otlpSpan.TraceID().String(), span.TraceID)
			assert.Equal(t, otlpSpan.SpanID().String(), span.SpanID)
			assert.Equal(t, int(otlpSpan.Status().Code()), int(span.Status))

			otlpSpan.Attributes().Range(
				func(k string, v pcommon.Value) bool {
assert.True(t, span.Tags.Contains(k)) 985 return true 986 }, 987 ) 988 989 for m := 0; m < otlpSpan.Links().Len(); m++ { 990 otLink := otlpSpan.Links().At(m) 991 assert.Equal(t, otLink.SpanID().String(), span.Links[m].SpanID) 992 assert.Equal(t, otLink.TraceID().String(), span.Links[m].TraceID) 993 assert.Equal(t, otLink.TraceState().AsRaw(), span.Links[m].TraceState) 994 995 otLink.Attributes().Range(func(k string, v pcommon.Value) bool { 996 assert.True(t, span.Links[m].Tags.Contains(k)) 997 return true 998 }) 999 } 1000 1001 for m := 0; m < otlpSpan.Events().Len(); m++ { 1002 otEvent := otlpSpan.Events().At(m) 1003 assert.Equal(t, otEvent.Name(), span.Events[m].Name) 1004 assert.Equal(t, int64(otEvent.Timestamp()), span.Events[m].Timestamp) 1005 otEvent.Attributes().Range(func(k string, v pcommon.Value) bool { 1006 assert.True(t, span.Links[m].Tags.Contains(k)) 1007 return true 1008 }) 1009 } 1010 } 1011 } 1012 } 1013 1014 // reference: https://github.com/open-telemetry/opentelemetry-collector/blob/main/pdata/plog/json_test.go#L28 1015 var logsOTLP = func() plog.Logs { 1016 ld := plog.NewLogs() 1017 rl := ld.ResourceLogs().AppendEmpty() 1018 rl.Resource().Attributes().PutStr("host.name", "testHost") 1019 rl.Resource().SetDroppedAttributesCount(1) 1020 rl.SetSchemaUrl("testSchemaURL") 1021 il := rl.ScopeLogs().AppendEmpty() 1022 il.Scope().SetName("name") 1023 il.Scope().SetVersion("") 1024 il.Scope().SetDroppedAttributesCount(1) 1025 il.SetSchemaUrl("ScopeLogsSchemaURL") 1026 lg := il.LogRecords().AppendEmpty() 1027 lg.SetSeverityNumber(plog.SeverityNumberError) 1028 lg.SetSeverityText("Error") 1029 lg.SetDroppedAttributesCount(1) 1030 lg.SetFlags(plog.DefaultLogRecordFlags) 1031 traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) 1032 spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) 1033 lg.SetTraceID(traceID) 1034 lg.SetSpanID(spanID) 1035 
	lg.Body().SetStr("hello world")
	t := pcommon.NewTimestampFromTime(time.Now())
	lg.SetTimestamp(t)
	lg.SetObservedTimestamp(t)
	lg.Attributes().PutStr("sdkVersion", "1.0.1")
	return ld
}()

// metricsSumOTLPFull builds a pmetric.Metrics fixture with a cumulative,
// monotonic sum metric carrying two datapoints and one exemplar.
// NOTE(review): unlike logsOTLP above, this is a func value (no trailing
// "()") — presumably invoked as metricsSumOTLPFull() by the metrics tests;
// confirm against the callers.
// reference: https://github.com/open-telemetry/opentelemetry-collector/blob/main/pdata/pmetric/json_test.go#L60
var metricsSumOTLPFull = func() pmetric.Metrics {
	metric := pmetric.NewMetrics()
	rs := metric.ResourceMetrics().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add InstrumentationLibraryMetrics.
	m := rs.ScopeMetrics().AppendEmpty()
	m.Scope().SetName("instrumentation name")
	m.Scope().SetVersion("instrumentation version")
	m.Scope().Attributes().PutStr("instrumentation.attribute", "test")
	m.SetSchemaUrl("schemaURL")
	// Add Metric
	sumMetric := m.Metrics().AppendEmpty()
	sumMetric.SetName("test sum")
	sumMetric.SetDescription("test sum")
	sumMetric.SetUnit("unit")
	sum := sumMetric.SetEmptySum()
	sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
	sum.SetIsMonotonic(true)
	datapoint := sum.DataPoints().AppendEmpty()
	datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint.SetIntValue(100)
	// Attributes covering every scalar pdata value type.
	datapoint.Attributes().PutStr("string", "value")
	datapoint.Attributes().PutBool("bool", true)
	datapoint.Attributes().PutInt("int", 1)
	datapoint.Attributes().PutDouble("double", 1.1)
	datapoint.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	exemplar := datapoint.Exemplars().AppendEmpty()
	exemplar.SetDoubleValue(99.3)
	exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
		0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10})
	spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18})
	exemplar.SetSpanID(spanID)
	exemplar.SetTraceID(traceID)
	exemplar.FilteredAttributes().PutStr("service.name", "testService")
	datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))

	// Second datapoint with distinct attribute values, no exemplar.
	datapoint2 := sum.DataPoints().AppendEmpty()
	datapoint2.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint2.SetIntValue(50)
	datapoint2.Attributes().PutStr("string", "value2")
	datapoint2.Attributes().PutBool("bool", false)
	datapoint2.Attributes().PutInt("int", 2)
	datapoint2.Attributes().PutDouble("double", 2.2)
	datapoint2.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("bar"))
	return metric
}

// metricsGaugeOTLPFull builds a pmetric.Metrics fixture with a single gauge
// datapoint and one exemplar. Like metricsSumOTLPFull, it is a func value
// (not invoked at declaration).
var metricsGaugeOTLPFull = func() pmetric.Metrics {
	metric := pmetric.NewMetrics()
	rs := metric.ResourceMetrics().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add InstrumentationLibraryMetrics.
	m := rs.ScopeMetrics().AppendEmpty()
	m.Scope().SetName("instrumentation name")
	m.Scope().SetVersion("instrumentation version")
	m.SetSchemaUrl("schemaURL")
	// Add Metric
	gaugeMetric := m.Metrics().AppendEmpty()
	gaugeMetric.SetName("test gauge")
	gaugeMetric.SetDescription("test gauge")
	gaugeMetric.SetUnit("unit")
	gauge := gaugeMetric.SetEmptyGauge()
	datapoint := gauge.DataPoints().AppendEmpty()
	datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint.SetDoubleValue(10.2)
	// Attributes covering every scalar pdata value type.
	datapoint.Attributes().PutStr("string", "value")
	datapoint.Attributes().PutBool("bool", true)
	datapoint.Attributes().PutInt("int", 1)
	datapoint.Attributes().PutDouble("double", 1.1)
	datapoint.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	exemplar := datapoint.Exemplars().AppendEmpty()
	exemplar.SetDoubleValue(99.3)
	exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10})
	spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18})
	exemplar.SetSpanID(spanID)
	exemplar.SetTraceID(traceID)
	exemplar.FilteredAttributes().PutStr("service.name", "testService")
	datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	return metric
}

// metricsHistogramOTLPFull builds a pmetric.Metrics fixture with a
// cumulative explicit-bounds histogram (counts {1,1,2}, bounds {10,100}).
// Func value; not invoked at declaration.
var metricsHistogramOTLPFull = func() pmetric.Metrics {
	metric := pmetric.NewMetrics()
	rs := metric.ResourceMetrics().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add InstrumentationLibraryMetrics.
	m := rs.ScopeMetrics().AppendEmpty()
	m.Scope().SetName("instrumentation name")
	m.Scope().SetVersion("instrumentation version")
	m.SetSchemaUrl("schemaURL")
	// Add Metric
	histogramMetric := m.Metrics().AppendEmpty()
	histogramMetric.SetName("test Histogram")
	histogramMetric.SetDescription("test Histogram")
	histogramMetric.SetUnit("unit")
	histogram := histogramMetric.SetEmptyHistogram()
	histogram.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
	datapoint := histogram.DataPoints().AppendEmpty()
	datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	// Attributes covering every scalar pdata value type.
	datapoint.Attributes().PutStr("string", "value")
	datapoint.Attributes().PutBool("bool", true)
	datapoint.Attributes().PutInt("int", 1)
	datapoint.Attributes().PutDouble("double", 1.1)
	datapoint.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	datapoint.SetCount(4)
	datapoint.SetSum(345)
	// Three buckets over two explicit bounds (<=10, <=100, >100).
	datapoint.BucketCounts().FromRaw([]uint64{1, 1, 2})
	datapoint.ExplicitBounds().FromRaw([]float64{10, 100})
	exemplar := datapoint.Exemplars().AppendEmpty()
	exemplar.SetDoubleValue(99.3)
	exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint.SetMin(float64(time.Now().Unix()))
	traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10})
	spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18})
	exemplar.SetSpanID(spanID)
	exemplar.SetTraceID(traceID)
	exemplar.FilteredAttributes().PutStr("service.name", "testService")
	datapoint.SetMax(float64(time.Now().Unix()))
	datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	return metric
}

// metricsExponentialHistogramOTLPFull builds a pmetric.Metrics fixture with
// a cumulative exponential histogram (scale 1, positive and negative
// buckets). Func value; not invoked at declaration.
var metricsExponentialHistogramOTLPFull = func() pmetric.Metrics {
	metric := pmetric.NewMetrics()
	rs := metric.ResourceMetrics().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add InstrumentationLibraryMetrics.
	m := rs.ScopeMetrics().AppendEmpty()
	m.Scope().SetName("instrumentation name")
	m.Scope().SetVersion("instrumentation version")
	m.SetSchemaUrl("schemaURL")
	// Add Metric
	histogramMetric := m.Metrics().AppendEmpty()
	histogramMetric.SetName("test ExponentialHistogram")
	histogramMetric.SetDescription("test ExponentialHistogram")
	histogramMetric.SetUnit("unit")
	histogram := histogramMetric.SetEmptyExponentialHistogram()
	histogram.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
	datapoint := histogram.DataPoints().AppendEmpty()
	datapoint.SetScale(1)
	datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	// Attributes covering every scalar pdata value type.
	datapoint.Attributes().PutStr("string", "value")
	datapoint.Attributes().PutBool("bool", true)
	datapoint.Attributes().PutInt("int", 1)
	datapoint.Attributes().PutDouble("double", 1.1)
	datapoint.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	datapoint.SetCount(4)
	datapoint.SetSum(345)
	// Positive buckets at offset 2; negative buckets at offset 3 (note:
	// the negative side is overwritten again further down in this builder).
	datapoint.Positive().BucketCounts().FromRaw([]uint64{1, 1, 2})
	datapoint.Positive().SetOffset(2)
	datapoint.Negative().BucketCounts().FromRaw([]uint64{1, 1, 2})
	datapoint.Negative().SetOffset(3)

	exemplar := datapoint.Exemplars().AppendEmpty()
	exemplar.SetDoubleValue(99.3)
	exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint.SetMin(float64(time.Now().Unix()))
	traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10})
	spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18})
	exemplar.SetSpanID(spanID)
	exemplar.SetTraceID(traceID)
	exemplar.FilteredAttributes().PutStr("service.name", "testService")
	datapoint.SetMax(float64(time.Now().Unix()))
	// NOTE(review): these two calls overwrite the Negative() bucket counts
	// and offset that were already set earlier in this builder (offset 3
	// becomes 2) — confirm the override is intentional.
	datapoint.Negative().BucketCounts().FromRaw([]uint64{1, 1, 2})
	datapoint.Negative().SetOffset(2)
	datapoint.SetZeroCount(5)
	datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	return metric
}

// metricsSummaryOTLPFull builds a pmetric.Metrics fixture with a summary
// metric (count 100, sum 1000, one 0.5-quantile). Func value; not invoked
// at declaration.
var metricsSummaryOTLPFull = func() pmetric.Metrics {
	metric := pmetric.NewMetrics()
	rs := metric.ResourceMetrics().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add InstrumentationLibraryMetrics.
	m := rs.ScopeMetrics().AppendEmpty()
	m.Scope().SetName("instrumentation name")
	m.Scope().SetVersion("instrumentation version")
	m.SetSchemaUrl("schemaURL")
	// Add Metric
	summaryMetric := m.Metrics().AppendEmpty()
	summaryMetric.SetName("test summary")
	summaryMetric.SetDescription("test summary")
	summaryMetric.SetUnit("unit")
	summary := summaryMetric.SetEmptySummary()
	datapoint := summary.DataPoints().AppendEmpty()
	datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	datapoint.SetCount(100)
	datapoint.SetSum(1000)
	quantile := datapoint.QuantileValues().AppendEmpty()
	quantile.SetQuantile(0.5)
	quantile.SetValue(1.2)
	// Attributes covering every scalar pdata value type.
	datapoint.Attributes().PutStr("string", "value")
	datapoint.Attributes().PutBool("bool", true)
	datapoint.Attributes().PutInt("int", 1)
	datapoint.Attributes().PutDouble("double", 1.1)
	datapoint.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	return metric
}

// tracesOTLPFull is a ptrace.Traces fixture (invoked immediately) with one
// fully populated span — attributes of every value type, an event, and a
// link — plus a second, minimal span (testSpan2).
// reference: https://github.com/open-telemetry/opentelemetry-collector/blob/main/pdata/ptrace/json_test.go#L59
var tracesOTLPFull = func() ptrace.Traces {
	traceID := pcommon.TraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10})
	spanID := pcommon.SpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18})
	td := ptrace.NewTraces()
	// Add ResourceSpans.
	rs := td.ResourceSpans().AppendEmpty()
	rs.SetSchemaUrl("schemaURL")
	// Add resource.
	rs.Resource().Attributes().PutStr("host.name", "testHost")
	rs.Resource().Attributes().PutStr("service.name", "testService")
	rs.Resource().SetDroppedAttributesCount(1)
	// Add ScopeSpans.
	il := rs.ScopeSpans().AppendEmpty()
	il.Scope().SetName("scope name")
	il.Scope().SetVersion("scope version")
	il.SetSchemaUrl("schemaURL")
	// Add spans.
	sp := il.Spans().AppendEmpty()
	sp.SetName("testSpan")
	sp.SetKind(ptrace.SpanKindClient)
	sp.SetDroppedAttributesCount(1)
	sp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	sp.SetTraceID(traceID)
	sp.SetSpanID(spanID)
	sp.SetDroppedEventsCount(1)
	sp.SetDroppedLinksCount(1)
	sp.SetEndTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	sp.SetParentSpanID(spanID)
	sp.TraceState().FromRaw("state")
	sp.Status().SetCode(ptrace.StatusCodeOk)
	sp.Status().SetMessage("message")
	// Add attributes.
	sp.Attributes().PutStr("string", "value")
	sp.Attributes().PutBool("bool", true)
	sp.Attributes().PutInt("int", 1)
	sp.Attributes().PutDouble("double", 1.1)
	sp.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	arr := sp.Attributes().PutEmptySlice("array")
	arr.AppendEmpty().SetInt(1)
	arr.AppendEmpty().SetStr("str")
	kvList := sp.Attributes().PutEmptyMap("kvList")
	kvList.PutInt("int", 1)
	kvList.PutStr("string", "string")
	// Add events.
	event := sp.Events().AppendEmpty()
	event.SetName("eventName")
	event.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	event.SetDroppedAttributesCount(1)
	event.Attributes().PutStr("string", "value")
	event.Attributes().PutBool("bool", true)
	event.Attributes().PutInt("int", 1)
	event.Attributes().PutDouble("double", 1.1)
	event.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	// Add links.
	link := sp.Links().AppendEmpty()
	link.TraceState().FromRaw("state")
	link.SetTraceID(traceID)
	link.SetSpanID(spanID)
	link.SetDroppedAttributesCount(1)
	link.Attributes().PutStr("string", "value")
	link.Attributes().PutBool("bool", true)
	link.Attributes().PutInt("int", 1)
	link.Attributes().PutDouble("double", 1.1)
	link.Attributes().PutEmptyBytes("bytes").FromRaw([]byte("foo"))
	// Add another span.
	sp2 := il.Spans().AppendEmpty()
	sp2.SetName("testSpan2")
	return td
}()