github.com/alibaba/ilogtail/pkg@v0.0.0-20250526110833-c53b480d046c/protocol/decoder/opentelemetry/otlp_metric_parser.go (about)

     1  package opentelemetry
     2  
     3  import (
     4  	"encoding/base64"
     5  	"encoding/hex"
     6  	"encoding/json"
     7  	"fmt"
     8  	"math"
     9  	"strconv"
    10  
    11  	"go.opentelemetry.io/collector/pdata/pcommon"
    12  	v1Common "go.opentelemetry.io/proto/otlp/common/v1"
    13  	v1 "go.opentelemetry.io/proto/otlp/metrics/v1"
    14  
    15  	"github.com/alibaba/ilogtail/pkg/helper"
    16  	"github.com/alibaba/ilogtail/pkg/protocol"
    17  	"github.com/alibaba/ilogtail/pkg/protocol/otlp"
    18  )
    19  
    20  func attrsToLabels(labels *helper.MetricLabels, attrs []*v1Common.KeyValue) {
    21  	for _, attribute := range attrs {
    22  		labels.Append(attribute.GetKey(), anyValueToString(attribute.GetValue()))
    23  	}
    24  }
    25  
    26  func anyValueToString(value *v1Common.AnyValue) string {
    27  	switch value.Value.(type) {
    28  	case *v1Common.AnyValue_StringValue:
    29  		return value.GetStringValue()
    30  	case *v1Common.AnyValue_IntValue:
    31  		return strconv.FormatInt(value.GetIntValue(), 10)
    32  	case *v1Common.AnyValue_DoubleValue:
    33  		return strconv.FormatFloat(value.GetDoubleValue(), 'f', -1, 64)
    34  	case *v1Common.AnyValue_BoolValue:
    35  		return strconv.FormatBool(value.GetBoolValue())
    36  	case *v1Common.AnyValue_KvlistValue:
    37  		data, _ := json.Marshal(value.GetKvlistValue())
    38  		return string(data)
    39  	case *v1Common.AnyValue_ArrayValue:
    40  		data, _ := json.Marshal(value.GetArrayValue())
    41  		return string(data)
    42  	case *v1Common.AnyValue_BytesValue:
    43  		return base64.StdEncoding.EncodeToString(value.GetBytesValue())
    44  	}
    45  	return ""
    46  }
    47  
    48  func ConvertOtlpMetrics(metrics *v1.ResourceMetrics) (logs []*protocol.Log, err error) {
    49  	if metrics == nil || len(metrics.GetScopeMetrics()) == 0 {
    50  		return nil, nil
    51  	}
    52  
    53  	var labels helper.MetricLabels
    54  	attrsToLabels(&labels, metrics.GetResource().GetAttributes())
    55  
    56  	for _, scopeMetrics := range metrics.GetScopeMetrics() {
    57  
    58  		for _, metric := range scopeMetrics.GetMetrics() {
    59  			switch metric.GetData().(type) {
    60  			case *v1.Metric_Gauge:
    61  				logs = append(logs, gauge2Logs(metric.GetName(), metric.GetGauge(), &labels)...)
    62  			case *v1.Metric_Histogram:
    63  				logs = append(logs, histogram2Logs(metric.GetName(), metric.GetHistogram(), &labels)...)
    64  			case *v1.Metric_Sum:
    65  				logs = append(logs, sum2Logs(metric.GetName(), metric.GetSum(), &labels)...)
    66  			case *v1.Metric_Summary:
    67  				logs = append(logs, summary2Logs(metric.GetName(), metric.GetSummary(), &labels)...)
    68  			case *v1.Metric_ExponentialHistogram:
    69  				logs = append(logs, exponentialHistogram2Logs(metric.GetName(), metric.GetExponentialHistogram(), &labels)...)
    70  			}
    71  		}
    72  	}
    73  
    74  	return logs, err
    75  }
    76  
// exponentialHistogram2Logs converts one OTLP ExponentialHistogram
// metric into logs. Per data point it emits: <name>_sum/_min/_max
// (only when non-zero), <name>_count, one log per exemplar, and one
// <name>_bucket log per positive bucket, per negative bucket, and for
// the zero-count bucket.
func exponentialHistogram2Logs(name string, histogram *v1.ExponentialHistogram, defaultLabels *helper.MetricLabels) (logs []*protocol.Log) {

	for _, dataPoint := range histogram.GetDataPoints() {
		// Per-point labels: resource labels plus data-point attributes.
		labels := defaultLabels.Clone()
		attrsToLabels(labels, dataPoint.GetAttributes())
		// labels.Append(otlp.TagKeyMetricAggregationTemporality, histogram.GetAggregationTemporality().String())
		// labels.Append(otlp.TagKeyMetricHistogramType, pmetric.MetricTypeExponentialHistogram.String())

		// NOTE(review): sum/min/max exactly equal to 0 are dropped, so a
		// genuine zero is indistinguishable from "unset" here — confirm
		// this is the intended semantics.
		if dataPoint.GetSum() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixSum, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetSum()))
		}
		if dataPoint.GetMin() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixMin, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetMin()))
		}
		if dataPoint.GetMax() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixMax, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetMax()))
		}
		logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixCount, labels, int64(dataPoint.GetTimeUnixNano()), float64(dataPoint.GetCount())))

		// Each exemplar gets its own clone so exemplar-specific labels
		// (trace/span IDs, filtered attributes) don't leak between logs.
		for _, exemplar := range dataPoint.GetExemplars() {
			logs = append(logs, exemplarMetricToLogs(name, exemplar, labels.Clone()))
		}

		// Bucket boundaries grow geometrically by base = 2^(2^-scale),
		// computed once per data point and shared by both signs.
		scale := dataPoint.GetScale()
		base := math.Pow(2, math.Pow(2, float64(-scale)))
		postiveFields := genExponentialHistogramValuesToValues(true, base, dataPoint.GetPositive())
		negativeFields := genExponentialHistogramValuesToValues(false, base, dataPoint.GetNegative())

		// One shared clone carries the bucket label; Replace mutates it
		// in place between appends. This relies on newMetricLogFromRaw
		// not retaining the live labels object across iterations.
		bucketLabels := labels.Clone()
		bucketLabels.Append(bucketLabelKey, "")
		for k, v := range postiveFields {
			bucketLabels.Replace(bucketLabelKey, k)
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixBucket, bucketLabels, int64(dataPoint.GetTimeUnixNano()), v))
		}
		// Zero bucket sits between the positive and negative series.
		bucketLabels.Replace(bucketLabelKey, otlp.FieldZeroCount)
		logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixBucket, bucketLabels, int64(dataPoint.GetTimeUnixNano()), float64(dataPoint.GetZeroCount())))
		for k, v := range negativeFields {
			bucketLabels.Replace(bucketLabelKey, k)
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixBucket, bucketLabels, int64(dataPoint.GetTimeUnixNano()), v))
		}
	}
	return logs
}
   120  
   121  func genExponentialHistogramValuesToValues(isPositive bool, base float64, buckets *v1.ExponentialHistogramDataPoint_Buckets) map[string]float64 {
   122  	offset := buckets.GetOffset()
   123  	rawbucketCounts := buckets.GetBucketCounts()
   124  	res := make(map[string]float64, len(rawbucketCounts))
   125  	for i := 0; i < len(rawbucketCounts); i++ {
   126  		bucketCount := rawbucketCounts[i]
   127  		lowerBoundary := math.Pow(base, float64(int(offset)+i))
   128  		upperBoundary := lowerBoundary * base
   129  		fieldKey := otlp.ComposeBucketFieldName(lowerBoundary, upperBoundary, isPositive)
   130  		res[fieldKey] = float64(bucketCount)
   131  	}
   132  	if isPositive {
   133  		res[otlp.FieldPositiveOffset] = float64(offset)
   134  	} else {
   135  		res[otlp.FieldNegativeOffset] = float64(offset)
   136  	}
   137  	return res
   138  }
   139  
   140  func summary2Logs(name string, summary *v1.Summary, defaultLabels *helper.MetricLabels) (logs []*protocol.Log) {
   141  
   142  	for _, dataPoint := range summary.GetDataPoints() {
   143  		labels := defaultLabels.Clone()
   144  
   145  		attrsToLabels(labels, dataPoint.GetAttributes())
   146  
   147  		logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixSum, labels, int64(toTimestamp(dataPoint.GetTimeUnixNano())), dataPoint.GetSum()))
   148  		logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixCount, labels, int64(toTimestamp(dataPoint.GetTimeUnixNano())), float64(dataPoint.GetCount())))
   149  
   150  		summaryLabels := labels.Clone()
   151  		summaryLabels.Append(summaryLabelKey, "")
   152  
   153  		for _, quantileValue := range dataPoint.GetQuantileValues() {
   154  			summaryLabels.Replace(summaryLabelKey, strconv.FormatFloat(quantileValue.GetQuantile(), 'g', -1, 64))
   155  			logs = append(logs, newMetricLogFromRaw(name, summaryLabels, int64(dataPoint.GetTimeUnixNano()), quantileValue.GetValue()))
   156  		}
   157  	}
   158  
   159  	return logs
   160  }
   161  
   162  func sum2Logs(name string, sum *v1.Sum, defaultLabels *helper.MetricLabels) (logs []*protocol.Log) {
   163  	for _, dataPoint := range sum.GetDataPoints() {
   164  		labels := defaultLabels.Clone()
   165  		attrsToLabels(labels, dataPoint.GetAttributes())
   166  
   167  		labels.Append(otlp.TagKeyMetricIsMonotonic, strconv.FormatBool(sum.GetIsMonotonic()))
   168  		// labels.Append(otlp.TagKeyMetricAggregationTemporality, sum.GetAggregationTemporality().String())
   169  
   170  		for _, exemplar := range dataPoint.GetExemplars() {
   171  			logs = append(logs, exemplarMetricToLogs(name, exemplar, labels.Clone()))
   172  		}
   173  
   174  		logs = append(logs, newMetricLogFromRaw(name, labels, int64(toTimestamp(dataPoint.TimeUnixNano)), value2Float(dataPoint)))
   175  	}
   176  
   177  	return logs
   178  }
   179  
// histogram2Logs converts one OTLP (explicit-bounds) Histogram metric
// into logs. Per data point it emits: <name>_sum/_min/_max (only when
// non-zero), <name>_count, one log per exemplar, and one <name>_bucket
// log per bucket with a cumulative count.
func histogram2Logs(name string, histogram *v1.Histogram, defaultLabels *helper.MetricLabels) (logs []*protocol.Log) {

	for _, dataPoint := range histogram.GetDataPoints() {
		// Per-point labels: resource labels plus data-point attributes.
		labels := defaultLabels.Clone()

		attrsToLabels(labels, dataPoint.GetAttributes())
		// labels.Append(otlp.TagKeyMetricAggregationTemporality, histogram.GetAggregationTemporality().String())
		// labels.Append(otlp.TagKeyMetricHistogramType, pmetric.MetricTypeHistogram.String())

		// NOTE(review): sum/min/max exactly equal to 0 are dropped, so a
		// genuine zero is indistinguishable from "unset" here — confirm
		// this is the intended semantics.
		if dataPoint.GetSum() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixSum, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetSum()))
		}
		if dataPoint.GetMin() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixMin, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetMin()))
		}
		if dataPoint.GetMax() != 0 {
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixMax, labels, int64(dataPoint.GetTimeUnixNano()), dataPoint.GetMax()))
		}

		logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixCount, labels, int64(dataPoint.GetTimeUnixNano()), float64(dataPoint.GetCount())))

		// Each exemplar gets its own clone so exemplar-specific labels
		// don't leak between logs.
		for _, exemplar := range dataPoint.GetExemplars() {
			logs = append(logs, exemplarMetricToLogs(name, exemplar, labels.Clone()))
		}

		// Render each explicit bound as the bucket's label value; the
		// extra final slot represents the +Inf (overflow) bucket.
		bounds := dataPoint.GetExplicitBounds()
		boundsStr := make([]string, len(bounds)+1)

		for i, bound := range bounds {
			boundsStr[i] = strconv.FormatFloat(bound, 'g', -1, 64)
		}
		boundsStr[len(boundsStr)-1] = infinityBoundValue

		// Guard against malformed points where the bucket-count slice
		// and bound list disagree in length: only the overlap is used.
		bucketCount := min(len(boundsStr), len(dataPoint.GetBucketCounts()))

		// One shared clone carries the bucket label; Replace mutates it
		// in place between appends.
		bucketLabels := labels.Clone()
		bucketLabels.Append(bucketLabelKey, "")

		// Per-bucket counts are accumulated into a running total, so
		// each emitted _bucket value is cumulative up to that bound
		// (Prometheus-style "le" semantics, presumably — verify against
		// downstream consumers).
		sumCount := uint64(0)

		for j := 0; j < bucketCount; j++ {
			bucket := dataPoint.GetBucketCounts()[j]
			bucketLabels.Replace(bucketLabelKey, boundsStr[j])
			sumCount += bucket
			logs = append(logs, newMetricLogFromRaw(name+metricNameSuffixBucket, bucketLabels, int64(dataPoint.GetTimeUnixNano()), float64(sumCount)))
		}

	}

	return logs
}
   231  
   232  func gauge2Logs(name string, gauga *v1.Gauge, labels *helper.MetricLabels) (logs []*protocol.Log) {
   233  	for _, dataPoint := range gauga.GetDataPoints() {
   234  
   235  		newLabels := labels.Clone()
   236  		attrsToLabels(newLabels, dataPoint.GetAttributes())
   237  
   238  		for _, exemplar := range dataPoint.GetExemplars() {
   239  			logs = append(logs, exemplarMetricToLogs(name, exemplar, newLabels.Clone()))
   240  		}
   241  
   242  		logs = append(logs, newMetricLogFromRaw(name, newLabels.Clone(), int64(toTimestamp(dataPoint.TimeUnixNano)), value2Float(dataPoint)))
   243  	}
   244  	return logs
   245  }
   246  
   247  func exemplarMetricToLogs(name string, exemplar *v1.Exemplar, labels *helper.MetricLabels) *protocol.Log {
   248  	metricName := name + metricNameSuffixExemplars
   249  
   250  	if hex.EncodeToString(exemplar.GetTraceId()) != "" {
   251  		labels.Append("traceId", hex.EncodeToString(exemplar.GetTraceId()))
   252  	}
   253  
   254  	if hex.EncodeToString(exemplar.GetSpanId()) != "" {
   255  		labels.Append("spanId", hex.EncodeToString(exemplar.GetSpanId()))
   256  	}
   257  
   258  	filterAttributeMap := pcommon.NewMap()
   259  	copyAttributeToMap(exemplar.GetFilteredAttributes(), &filterAttributeMap)
   260  
   261  	for key, value := range filterAttributeMap.AsRaw() {
   262  		labels.Append(key, fmt.Sprintf("%v", value))
   263  	}
   264  
   265  	return helper.NewMetricLog(metricName, int64(exemplar.TimeUnixNano), exemplarValue2Float(exemplar), labels)
   266  }
   267  
   268  func copyAttributeToMap(attributes []*v1Common.KeyValue, attributeMap *pcommon.Map) {
   269  	for _, attribute := range attributes {
   270  		attributeMap.PutStr(attribute.GetKey(), anyValueToString(attribute.GetValue()))
   271  	}
   272  }
   273  
   274  func exemplarValue2Float(exemplar *v1.Exemplar) float64 {
   275  	switch exemplar.GetValue().(type) {
   276  	case *v1.Exemplar_AsDouble:
   277  		return exemplar.GetAsDouble()
   278  	case *v1.Exemplar_AsInt:
   279  		return float64(exemplar.GetAsInt())
   280  	default:
   281  		return 0
   282  	}
   283  }
   284  
   285  func value2Float(dataPoint *v1.NumberDataPoint) float64 {
   286  	switch dataPoint.GetValue().(type) {
   287  	case *v1.NumberDataPoint_AsDouble:
   288  		return dataPoint.GetAsDouble()
   289  	case *v1.NumberDataPoint_AsInt:
   290  		return float64(dataPoint.GetAsInt())
   291  	default:
   292  		return 0
   293  	}
   294  }
   295  
   296  func toTimestamp(timUnixNano uint64) pcommon.Timestamp {
   297  	return pcommon.Timestamp(timUnixNano)
   298  }