github.com/alibaba/ilogtail/pkg@v0.0.0-20250526110833-c53b480d046c/protocol/decoder/opentelemetry/otlpDataToPielineGroupEvents.go (about)

     1  package opentelemetry
     2  
     3  import (
     4  	"fmt"
     5  	"math"
     6  	"strconv"
     7  	"time"
     8  
     9  	"go.opentelemetry.io/collector/pdata/pcommon"
    10  	"go.opentelemetry.io/collector/pdata/plog"
    11  	"go.opentelemetry.io/collector/pdata/plog/plogotlp"
    12  	"go.opentelemetry.io/collector/pdata/pmetric"
    13  	"go.opentelemetry.io/collector/pdata/pmetric/pmetricotlp"
    14  	"go.opentelemetry.io/collector/pdata/ptrace"
    15  	"go.opentelemetry.io/collector/pdata/ptrace/ptraceotlp"
    16  
    17  	"github.com/alibaba/ilogtail/pkg/models"
    18  	"github.com/alibaba/ilogtail/pkg/protocol/otlp"
    19  	"github.com/alibaba/ilogtail/pkg/util"
    20  )
    21  
    22  func genScopeTags(scope pcommon.InstrumentationScope) models.Tags {
    23  	scopeTags := attrs2Tags(scope.Attributes())
    24  	if scope.Name() != "" {
    25  		scopeTags.Add(otlp.TagKeyScopeName, scope.Name())
    26  	}
    27  
    28  	if scope.Version() != "" {
    29  		scopeTags.Add(otlp.TagKeyScopeVersion, scope.Version())
    30  	}
    31  
    32  	scopeTags.Add(otlp.TagKeyScopeDroppedAttributesCount, strconv.Itoa(int(scope.DroppedAttributesCount())))
    33  	return scopeTags
    34  }
    35  
    36  func convertSpanLinks(srcLinks ptrace.SpanLinkSlice) []*models.SpanLink {
    37  	spanLinks := make([]*models.SpanLink, 0, srcLinks.Len())
    38  
    39  	for i := 0; i < srcLinks.Len(); i++ {
    40  		srcLink := srcLinks.At(i)
    41  
    42  		spanLink := models.SpanLink{
    43  			TraceID:    srcLink.TraceID().String(),
    44  			SpanID:     srcLink.SpanID().String(),
    45  			TraceState: srcLink.TraceState().AsRaw(),
    46  			Tags:       attrs2Tags(srcLink.Attributes()),
    47  		}
    48  		spanLinks = append(spanLinks, &spanLink)
    49  	}
    50  	return spanLinks
    51  }
    52  
    53  func convertSpanEvents(srcEvents ptrace.SpanEventSlice) []*models.SpanEvent {
    54  	events := make([]*models.SpanEvent, 0, srcEvents.Len())
    55  	for i := 0; i < srcEvents.Len(); i++ {
    56  		srcEvent := srcEvents.At(i)
    57  
    58  		spanEvent := &models.SpanEvent{
    59  			Name:      srcEvent.Name(),
    60  			Timestamp: int64(srcEvent.Timestamp()),
    61  			Tags:      attrs2Tags(srcEvent.Attributes()),
    62  		}
    63  		events = append(events, spanEvent)
    64  	}
    65  	return events
    66  }
    67  
    68  // attrs2Tags converts otlp attributes to tags, it skips empty values.
    69  func attrs2Tags(attributes pcommon.Map) models.Tags {
    70  	tags := models.NewTags()
    71  
    72  	attributes.Range(
    73  		func(k string, v pcommon.Value) bool {
    74  			if tagValue := v.AsString(); tagValue != "" {
    75  				tags.Add(k, tagValue)
    76  			}
    77  			return true
    78  		},
    79  	)
    80  	return tags
    81  }
    82  
// attrs2Meta converts otlp attributes to metadata, skipping attributes
// whose string form is empty. Same logic as attrs2Tags, but the result is
// used as group-level metadata rather than tags.
func attrs2Meta(attributes pcommon.Map) models.Metadata {
	meta := models.NewMetadata()

	attributes.Range(
		func(k string, v pcommon.Value) bool {
			if tagValue := v.AsString(); tagValue != "" {
				meta.Add(k, tagValue)
			}
			return true
		},
	)
	return meta
}
    97  
    98  func getValue(intValue int64, doubleValue float64) float64 {
    99  	if intValue != 0 {
   100  		return float64(intValue)
   101  	}
   102  
   103  	return doubleValue
   104  }
   105  
   106  func newMetricFromGaugeDatapoint(datapoint pmetric.NumberDataPoint, metricName, metricUnit, metricDescription string) *models.Metric {
   107  	timestamp := int64(datapoint.Timestamp())
   108  	startTimestamp := datapoint.StartTimestamp()
   109  	tags := attrs2Tags(datapoint.Attributes())
   110  
   111  	value := getValue(datapoint.IntValue(), datapoint.DoubleValue())
   112  	metric := models.NewSingleValueMetric(metricName, models.MetricTypeGauge, tags, timestamp, value)
   113  	metric.Unit = metricUnit
   114  	metric.Description = metricDescription
   115  	metric.SetObservedTimestamp(uint64(startTimestamp))
   116  	return metric
   117  }
   118  
   119  func newMetricFromSumDatapoint(datapoint pmetric.NumberDataPoint, aggregationTemporality pmetric.AggregationTemporality, isMonotonic, metricName, metricUnit, metricDescription string) *models.Metric {
   120  	timestamp := int64(datapoint.Timestamp())
   121  	startTimestamp := datapoint.StartTimestamp()
   122  	tags := attrs2Tags(datapoint.Attributes())
   123  	tags.Add(otlp.TagKeyMetricIsMonotonic, isMonotonic)
   124  	tags.Add(otlp.TagKeyMetricAggregationTemporality, aggregationTemporality.String())
   125  
   126  	value := getValue(datapoint.IntValue(), datapoint.DoubleValue())
   127  	var metric *models.Metric
   128  	if aggregationTemporality == pmetric.AggregationTemporalityCumulative {
   129  		metric = models.NewSingleValueMetric(metricName, models.MetricTypeCounter, tags, timestamp, value)
   130  	} else {
   131  		metric = models.NewSingleValueMetric(metricName, models.MetricTypeRateCounter, tags, timestamp, value)
   132  	}
   133  	metric.Unit = metricUnit
   134  	metric.Description = metricDescription
   135  	metric.SetObservedTimestamp(uint64(startTimestamp))
   136  	return metric
   137  }
   138  
   139  func newMetricFromSummaryDatapoint(datapoint pmetric.SummaryDataPoint, metricName, metricUnit, metricDescription string) *models.Metric {
   140  	timestamp := int64(datapoint.Timestamp())
   141  	startTimestamp := datapoint.StartTimestamp()
   142  	tags := attrs2Tags(datapoint.Attributes())
   143  
   144  	multivalue := models.NewMetricMultiValue()
   145  	summaryValues := datapoint.QuantileValues()
   146  	for m := 0; m < summaryValues.Len(); m++ {
   147  		summaryValue := summaryValues.At(m)
   148  		quantile := summaryValue.Quantile()
   149  		value := summaryValue.Value()
   150  		multivalue.Add(strconv.FormatFloat(quantile, 'f', -1, 64), value)
   151  	}
   152  
   153  	multivalue.Add(otlp.FieldCount, float64(datapoint.Count()))
   154  	multivalue.Add(otlp.FieldSum, datapoint.Sum())
   155  
   156  	metric := models.NewMultiValuesMetric(metricName, models.MetricTypeSummary, tags, timestamp, multivalue.GetMultiValues())
   157  	metric.Unit = metricUnit
   158  	metric.Description = metricDescription
   159  	metric.SetObservedTimestamp(uint64(startTimestamp))
   160  	return metric
   161  }
   162  
   163  func newMetricFromHistogramDatapoint(datapoint pmetric.HistogramDataPoint, aggregationTemporality pmetric.AggregationTemporality, metricName, metricUnit, metricDescription string) *models.Metric {
   164  	timestamp := int64(datapoint.Timestamp())
   165  	startTimestamp := datapoint.StartTimestamp()
   166  
   167  	tags := attrs2Tags(datapoint.Attributes())
   168  	tags.Add(otlp.TagKeyMetricAggregationTemporality, aggregationTemporality.String())
   169  	tags.Add(otlp.TagKeyMetricHistogramType, pmetric.MetricTypeHistogram.String())
   170  
   171  	// TODO:
   172  	// handle datapoint's Exemplars, Flags
   173  	multivalue := models.NewMetricMultiValue()
   174  	multivalue.Add(otlp.FieldCount, float64(datapoint.Count()))
   175  
   176  	if datapoint.HasSum() {
   177  		multivalue.Add(otlp.FieldSum, datapoint.Sum())
   178  	}
   179  	if datapoint.HasMin() {
   180  		multivalue.Add(otlp.FieldMin, datapoint.Min())
   181  	}
   182  
   183  	if datapoint.HasMax() {
   184  		multivalue.Add(otlp.FieldMax, datapoint.Max())
   185  	}
   186  
   187  	bucketCounts, explicitBounds := datapoint.BucketCounts(), datapoint.ExplicitBounds()
   188  
   189  	// bucketsCounts can be 0, otherwise, #bucketCounts == #bounds + 1.
   190  	if bucketCounts.Len() == 0 || bucketCounts.Len() != explicitBounds.Len()+1 {
   191  		metric := models.NewMultiValuesMetric(metricName, models.MetricTypeHistogram, tags, timestamp, multivalue.GetMultiValues())
   192  		metric.Unit = metricUnit
   193  		metric.SetObservedTimestamp(uint64(startTimestamp))
   194  		return metric
   195  	}
   196  
   197  	// for bucket bounds: [0, 5, 10], there are 4 bucket counts: (-inf, 0], (0, 5], (5, 10], (10, +inf]
   198  	for m := 0; m < bucketCounts.Len(); m++ {
   199  		lowerBound := math.Inf(-1)
   200  		upperBound := math.Inf(+1)
   201  		if m > 0 {
   202  			lowerBound = explicitBounds.At(m - 1)
   203  		}
   204  		if m < explicitBounds.Len() {
   205  			upperBound = explicitBounds.At(m)
   206  		}
   207  		fieldName := otlp.ComposeBucketFieldName(lowerBound, upperBound, true)
   208  
   209  		count := bucketCounts.At(m)
   210  		multivalue.Add(fieldName, float64(count))
   211  	}
   212  
   213  	metric := models.NewMultiValuesMetric(metricName, models.MetricTypeHistogram, tags, timestamp, multivalue.GetMultiValues())
   214  	metric.Unit = metricUnit
   215  	metric.Description = metricDescription
   216  	metric.SetObservedTimestamp(uint64(startTimestamp))
   217  	return metric
   218  }
   219  
   220  // Note that ExponentialHistogram is still in experimental status.
   221  // https://opentelemetry.io/docs/reference/specification/metrics/data-model/#exponentialhistogram
   222  func newMetricFromExponentialHistogramDatapoint(datapoint pmetric.ExponentialHistogramDataPoint, aggregationTemporality pmetric.AggregationTemporality, metricName, metricUnit, metricDescription string) *models.Metric {
   223  	timestamp := int64(datapoint.Timestamp())
   224  	startTimestamp := datapoint.StartTimestamp()
   225  
   226  	tags := attrs2Tags(datapoint.Attributes())
   227  	tags.Add(otlp.TagKeyMetricAggregationTemporality, aggregationTemporality.String())
   228  	tags.Add(otlp.TagKeyMetricHistogramType, pmetric.MetricTypeExponentialHistogram.String())
   229  
   230  	// TODO:
   231  	// handle datapoint's Exemplars, Flags
   232  	multivalue := models.NewMetricMultiValue()
   233  	multivalue.Add(otlp.FieldCount, float64(datapoint.Count()))
   234  
   235  	if datapoint.HasSum() {
   236  		multivalue.Add(otlp.FieldSum, datapoint.Sum())
   237  	}
   238  	if datapoint.HasMin() {
   239  		multivalue.Add(otlp.FieldMin, datapoint.Min())
   240  	}
   241  
   242  	if datapoint.HasMax() {
   243  		multivalue.Add(otlp.FieldMax, datapoint.Max())
   244  	}
   245  
   246  	scale := datapoint.Scale()
   247  	base := math.Pow(2, math.Pow(2, float64(-scale)))
   248  	multivalue.Values.Add(otlp.FieldScale, float64(scale)) // store scale in multivalues
   249  
   250  	// For example, when scale is 3, base is 2 ** (2 ** -3) = 2 ** (1/8).
   251  	// The negative bucket bounds look like:
   252  	// ...., [-2 ** (2/8),  -2 ** (1/8)), [-2 ** (1/8), -2 ** (0/8)),
   253  	// The positive bucket bounds look like:
   254  	// (2 ** (0/8), 2 ** (1/8)], (2 ** (1/8), 2 ** (2/8)], ....
   255  	// Meanwhile, there is also a zero bucket.
   256  	postiveFields := genExponentialHistogramValues(true, base, datapoint.Positive())
   257  	negativeFields := genExponentialHistogramValues(false, base, datapoint.Negative())
   258  
   259  	multivalue.Values.AddAll(postiveFields)
   260  	multivalue.Values.AddAll(negativeFields)
   261  
   262  	multivalue.Add(otlp.FieldZeroCount, float64(datapoint.ZeroCount()))
   263  
   264  	metric := models.NewMultiValuesMetric(metricName, models.MetricTypeHistogram, tags, timestamp, multivalue.GetMultiValues())
   265  	metric.Unit = metricUnit
   266  	metric.Description = metricDescription
   267  	metric.SetObservedTimestamp(uint64(startTimestamp))
   268  	return metric
   269  }
   270  
   271  func genExponentialHistogramValues(isPositive bool, base float64, buckets pmetric.ExponentialHistogramDataPointBuckets) map[string]float64 {
   272  	offset := buckets.Offset()
   273  	rawbucketCounts := buckets.BucketCounts()
   274  	res := make(map[string]float64, rawbucketCounts.Len())
   275  	for i := 0; i < rawbucketCounts.Len(); i++ {
   276  		bucketCount := rawbucketCounts.At(i)
   277  		lowerBoundary := math.Pow(base, float64(int(offset)+i))
   278  		upperBoundary := lowerBoundary * base
   279  		fieldKey := otlp.ComposeBucketFieldName(lowerBoundary, upperBoundary, isPositive)
   280  		res[fieldKey] = float64(bucketCount)
   281  	}
   282  	if isPositive {
   283  		res[otlp.FieldPositiveOffset] = float64(offset)
   284  	} else {
   285  		res[otlp.FieldNegativeOffset] = float64(offset)
   286  	}
   287  	return res
   288  }
   289  
// ConvertOtlpLogRequestToGroupEvents converts an otlp log export request into
// pipeline group events by unwrapping the request and delegating to
// ConvertOtlpLogsToGroupEvents.
func ConvertOtlpLogRequestToGroupEvents(otlpLogReq plogotlp.ExportRequest) ([]*models.PipelineGroupEvents, error) {
	return ConvertOtlpLogsToGroupEvents(otlpLogReq.Logs())
}
   293  
// ConvertOtlpMetricRequestToGroupEvents converts an otlp metric export request
// into pipeline group events by unwrapping the request and delegating to
// ConvertOtlpMetricsToGroupEvents.
func ConvertOtlpMetricRequestToGroupEvents(otlpMetricReq pmetricotlp.ExportRequest) ([]*models.PipelineGroupEvents, error) {
	return ConvertOtlpMetricsToGroupEvents(otlpMetricReq.Metrics())
}
   297  
// ConvertOtlpTraceRequestToGroupEvents converts an otlp trace export request
// into pipeline group events by unwrapping the request and delegating to
// ConvertOtlpTracesToGroupEvents.
func ConvertOtlpTraceRequestToGroupEvents(otlpTraceReq ptraceotlp.ExportRequest) ([]*models.PipelineGroupEvents, error) {
	return ConvertOtlpTracesToGroupEvents(otlpTraceReq.Traces())
}
   301  
   302  func ConvertOtlpLogsToGroupEvents(logs plog.Logs) (groupEventsSlice []*models.PipelineGroupEvents, err error) {
   303  	resLogs := logs.ResourceLogs()
   304  	resLogsLen := resLogs.Len()
   305  
   306  	if resLogsLen == 0 {
   307  		return
   308  	}
   309  
   310  	for i := 0; i < resLogsLen; i++ {
   311  		resourceLog := resLogs.At(i)
   312  		resourceAttrs := resourceLog.Resource().Attributes()
   313  		scopeLogs := resourceLog.ScopeLogs()
   314  		scopeLogsLen := scopeLogs.Len()
   315  
   316  		for j := 0; j < scopeLogsLen; j++ {
   317  			scopeLog := scopeLogs.At(j)
   318  			scope := scopeLog.Scope()
   319  			scopeTags := genScopeTags(scope)
   320  			otLogs := scopeLog.LogRecords()
   321  			otLogsLen := otLogs.Len()
   322  
   323  			groupEvents := &models.PipelineGroupEvents{
   324  				Group:  models.NewGroup(attrs2Meta(resourceAttrs), scopeTags),
   325  				Events: make([]models.PipelineEvent, 0, otLogs.Len()),
   326  			}
   327  
   328  			for k := 0; k < otLogsLen; k++ {
   329  				logRecord := otLogs.At(k)
   330  
   331  				var body []byte
   332  				switch logRecord.Body().Type() {
   333  				case pcommon.ValueTypeBytes:
   334  					body = logRecord.Body().Bytes().AsRaw()
   335  				case pcommon.ValueTypeStr:
   336  					body = util.ZeroCopyStringToBytes(logRecord.Body().AsString())
   337  				default:
   338  					body = util.ZeroCopyStringToBytes(fmt.Sprintf("%#v", logRecord.Body().AsRaw()))
   339  				}
   340  
   341  				level := logRecord.SeverityText()
   342  				if level == "" {
   343  					level = logRecord.SeverityNumber().String()
   344  				}
   345  
   346  				event := models.NewLog(
   347  					logEventName,
   348  					body,
   349  					level,
   350  					logRecord.SpanID().String(),
   351  					logRecord.TraceID().String(),
   352  					attrs2Tags(logRecord.Attributes()),
   353  					uint64(logRecord.Timestamp().AsTime().UnixNano()),
   354  				)
   355  				event.ObservedTimestamp = uint64(logRecord.ObservedTimestamp().AsTime().UnixNano())
   356  				event.Tags.Add(otlp.TagKeyLogFlag, strconv.Itoa(int(logRecord.Flags())))
   357  				groupEvents.Events = append(groupEvents.Events, event)
   358  			}
   359  
   360  			groupEventsSlice = append(groupEventsSlice, groupEvents)
   361  		}
   362  	}
   363  
   364  	return groupEventsSlice, err
   365  }
   366  
   367  func ConvertOtlpMetricsToGroupEvents(metrics pmetric.Metrics) (groupEventsSlice []*models.PipelineGroupEvents, err error) {
   368  	resMetrics := metrics.ResourceMetrics()
   369  	if resMetrics.Len() == 0 {
   370  		return
   371  	}
   372  
   373  	for i := 0; i < resMetrics.Len(); i++ {
   374  		resourceMetric := resMetrics.At(i)
   375  		resourceAttrs := resourceMetric.Resource().Attributes()
   376  		scopeMetrics := resourceMetric.ScopeMetrics()
   377  
   378  		for j := 0; j < scopeMetrics.Len(); j++ {
   379  			scopeMetric := scopeMetrics.At(j)
   380  			scope := scopeMetric.Scope()
   381  			scopeTags := genScopeTags(scope)
   382  			otMetrics := scopeMetric.Metrics()
   383  
   384  			groupEvents := &models.PipelineGroupEvents{
   385  				Group:  models.NewGroup(attrs2Meta(resourceAttrs), scopeTags),
   386  				Events: make([]models.PipelineEvent, 0, otMetrics.Len()),
   387  			}
   388  
   389  			for k := 0; k < otMetrics.Len(); k++ {
   390  				otMetric := otMetrics.At(k)
   391  				metricName := otMetric.Name()
   392  				metricUnit := otMetric.Unit()
   393  				metricDescription := otMetric.Description()
   394  
   395  				switch otMetric.Type() {
   396  				case pmetric.MetricTypeGauge:
   397  					otGauge := otMetric.Gauge()
   398  					otDatapoints := otGauge.DataPoints()
   399  					for l := 0; l < otDatapoints.Len(); l++ {
   400  						datapoint := otDatapoints.At(l)
   401  						metric := newMetricFromGaugeDatapoint(datapoint, metricName, metricUnit, metricDescription)
   402  						groupEvents.Events = append(groupEvents.Events, metric)
   403  					}
   404  				case pmetric.MetricTypeSum:
   405  					otSum := otMetric.Sum()
   406  					isMonotonic := strconv.FormatBool(otSum.IsMonotonic())
   407  					aggregationTemporality := otSum.AggregationTemporality()
   408  					otDatapoints := otSum.DataPoints()
   409  
   410  					for l := 0; l < otDatapoints.Len(); l++ {
   411  						datapoint := otDatapoints.At(l)
   412  						metric := newMetricFromSumDatapoint(datapoint, aggregationTemporality, isMonotonic, metricName, metricUnit, metricDescription)
   413  						groupEvents.Events = append(groupEvents.Events, metric)
   414  					}
   415  				case pmetric.MetricTypeSummary:
   416  					otSummary := otMetric.Summary()
   417  					otDatapoints := otSummary.DataPoints()
   418  					for l := 0; l < otDatapoints.Len(); l++ {
   419  						datapoint := otDatapoints.At(l)
   420  						metric := newMetricFromSummaryDatapoint(datapoint, metricName, metricUnit, metricDescription)
   421  						groupEvents.Events = append(groupEvents.Events, metric)
   422  					}
   423  				case pmetric.MetricTypeHistogram:
   424  					otHistogram := otMetric.Histogram()
   425  					aggregationTemporality := otHistogram.AggregationTemporality()
   426  					otDatapoints := otHistogram.DataPoints()
   427  
   428  					for l := 0; l < otDatapoints.Len(); l++ {
   429  						datapoint := otDatapoints.At(l)
   430  						metric := newMetricFromHistogramDatapoint(datapoint, aggregationTemporality, metricName, metricUnit, metricDescription)
   431  						groupEvents.Events = append(groupEvents.Events, metric)
   432  					}
   433  				case pmetric.MetricTypeExponentialHistogram:
   434  					otExponentialHistogram := otMetric.ExponentialHistogram()
   435  					aggregationTemporality := otExponentialHistogram.AggregationTemporality()
   436  					otDatapoints := otExponentialHistogram.DataPoints()
   437  
   438  					for l := 0; l < otDatapoints.Len(); l++ {
   439  						datapoint := otDatapoints.At(l)
   440  						metric := newMetricFromExponentialHistogramDatapoint(datapoint, aggregationTemporality, metricName, metricUnit, metricDescription)
   441  						groupEvents.Events = append(groupEvents.Events, metric)
   442  					}
   443  				default:
   444  					// TODO:
   445  					// find a better way to handle metric with type MetricTypeEmpty.
   446  					metric := models.NewSingleValueMetric(metricName, models.MetricTypeUntyped, models.NewTags(), time.Now().Unix(), 0)
   447  					metric.Unit = metricUnit
   448  					metric.Description = otMetric.Description()
   449  					groupEvents.Events = append(groupEvents.Events, metric)
   450  				}
   451  
   452  			}
   453  			groupEventsSlice = append(groupEventsSlice, groupEvents)
   454  		}
   455  	}
   456  	return groupEventsSlice, err
   457  }
   458  
   459  func ConvertOtlpTracesToGroupEvents(traces ptrace.Traces) (groupEventsSlice []*models.PipelineGroupEvents, err error) {
   460  	resSpans := traces.ResourceSpans()
   461  
   462  	for i := 0; i < resSpans.Len(); i++ {
   463  		resourceSpan := resSpans.At(i)
   464  		// A Resource is an immutable representation of the entity producing telemetry as Attributes.
   465  		// These attributes should be stored in meta.
   466  		resourceAttrs := resourceSpan.Resource().Attributes()
   467  		scopeSpans := resourceSpan.ScopeSpans()
   468  
   469  		for j := 0; j < scopeSpans.Len(); j++ {
   470  			scopeSpan := scopeSpans.At(j)
   471  			scope := scopeSpan.Scope()
   472  			scopeTags := genScopeTags(scope)
   473  			otSpans := scopeSpan.Spans()
   474  
   475  			groupEvents := &models.PipelineGroupEvents{
   476  				Group:  models.NewGroup(attrs2Meta(resourceAttrs), scopeTags),
   477  				Events: make([]models.PipelineEvent, 0, otSpans.Len()),
   478  			}
   479  
   480  			for k := 0; k < otSpans.Len(); k++ {
   481  				otSpan := otSpans.At(k)
   482  
   483  				spanName := otSpan.Name()
   484  				traceID := otSpan.TraceID().String()
   485  				spanID := otSpan.SpanID().String()
   486  				spanKind := models.SpanKind(otSpan.Kind())
   487  				startTs := otSpan.StartTimestamp()
   488  				endTs := otSpan.EndTimestamp()
   489  				spanAttrs := otSpan.Attributes()
   490  				events := convertSpanEvents(otSpan.Events())
   491  				links := convertSpanLinks(otSpan.Links())
   492  
   493  				span := models.NewSpan(spanName, traceID, spanID, spanKind,
   494  					uint64(startTs), uint64(endTs), attrs2Tags(spanAttrs), events, links)
   495  
   496  				span.ParentSpanID = otSpan.ParentSpanID().String()
   497  				span.Status = models.StatusCode(otSpan.Status().Code())
   498  
   499  				if message := otSpan.Status().Message(); len(message) > 0 {
   500  					span.Tags.Add(otlp.TagKeySpanStatusMessage, message)
   501  				}
   502  
   503  				span.Tags.Add(otlp.TagKeySpanDroppedEventsCount, strconv.Itoa(int(otSpan.DroppedEventsCount())))
   504  				span.Tags.Add(otlp.TagKeySpanDroppedAttrsCount, strconv.Itoa(int(otSpan.DroppedAttributesCount())))
   505  				span.Tags.Add(otlp.TagKeySpanDroppedLinksCount, strconv.Itoa(int(otSpan.DroppedLinksCount())))
   506  
   507  				groupEvents.Events = append(groupEvents.Events, span)
   508  			}
   509  			groupEventsSlice = append(groupEventsSlice, groupEvents)
   510  		}
   511  
   512  	}
   513  	return groupEventsSlice, err
   514  }