// Copyright (c) The Cortex Authors.
// Licensed under the Apache License 2.0.

package cortexpb

import (
	"flag"
	"fmt"
	"io"
	"strings"
	"sync"
	"unsafe"

	"github.com/prometheus/prometheus/model/labels"
)

var (
	// Default preallocation sizes used by the pools below.
	// All but expectedExemplarsPerSeries are overridable via RegisterFlags.
	expectedTimeseries         = 100
	expectedLabels             = 20
	expectedSamplesPerSeries   = 10
	expectedExemplarsPerSeries = 1

	/*
		We cannot pool these as pointer-to-slice because the place we use them is in WriteRequest which is generated from Protobuf
		and we don't have an option to make it a pointer. There is overhead here 24 bytes of garbage every time a PreallocTimeseries
		is re-used. But since the slices are far far larger, we come out ahead.
	*/
	slicePool = sync.Pool{
		New: func() interface{} {
			return make([]PreallocTimeseries, 0, expectedTimeseries)
		},
	}

	// timeSeriesPool recycles *TimeSeries values with pre-sized inner slices,
	// so repeated unmarshalling avoids re-growing Labels/Samples/Exemplars.
	timeSeriesPool = sync.Pool{
		New: func() interface{} {
			return &TimeSeries{
				Labels:    make([]LabelAdapter, 0, expectedLabels),
				Samples:   make([]Sample, 0, expectedSamplesPerSeries),
				Exemplars: make([]Exemplar, 0, expectedExemplarsPerSeries),
			}
		},
	}
)

// PreallocConfig configures how structures will be preallocated to optimise
// proto unmarshalling.
type PreallocConfig struct{}

// RegisterFlags registers configuration settings.
50 func (PreallocConfig) RegisterFlags(f *flag.FlagSet) { 51 f.IntVar(&expectedTimeseries, "ingester-client.expected-timeseries", expectedTimeseries, "Expected number of timeseries per request, used for preallocations.") 52 f.IntVar(&expectedLabels, "ingester-client.expected-labels", expectedLabels, "Expected number of labels per timeseries, used for preallocations.") 53 f.IntVar(&expectedSamplesPerSeries, "ingester-client.expected-samples-per-series", expectedSamplesPerSeries, "Expected number of samples per timeseries, used for preallocations.") 54 } 55 56 // PreallocWriteRequest is a WriteRequest which preallocs slices on Unmarshal. 57 type PreallocWriteRequest struct { 58 WriteRequest 59 } 60 61 // Unmarshal implements proto.Message. 62 func (p *PreallocWriteRequest) Unmarshal(dAtA []byte) error { 63 p.Timeseries = PreallocTimeseriesSliceFromPool() 64 return p.WriteRequest.Unmarshal(dAtA) 65 } 66 67 // PreallocTimeseries is a TimeSeries which preallocs slices on Unmarshal. 68 type PreallocTimeseries struct { 69 *TimeSeries 70 } 71 72 // Unmarshal implements proto.Message. 73 func (p *PreallocTimeseries) Unmarshal(dAtA []byte) error { 74 p.TimeSeries = TimeseriesFromPool() 75 return p.TimeSeries.Unmarshal(dAtA) 76 } 77 78 // LabelAdapter is a labels.Label that can be marshalled to/from protos. 79 type LabelAdapter labels.Label 80 81 // Marshal implements proto.Marshaller. 82 func (bs *LabelAdapter) Marshal() ([]byte, error) { 83 size := bs.Size() 84 buf := make([]byte, size) 85 n, err := bs.MarshalToSizedBuffer(buf[:size]) 86 if err != nil { 87 return nil, err 88 } 89 return buf[:n], err 90 } 91 92 func (bs *LabelAdapter) MarshalTo(dAtA []byte) (int, error) { 93 size := bs.Size() 94 return bs.MarshalToSizedBuffer(dAtA[:size]) 95 } 96 97 // MarshalTo implements proto.Marshaller. 
// MarshalToSizedBuffer writes the label pair into the tail of buf, fields
// back-to-front, and returns the number of bytes written (gogoproto style).
func (bs *LabelAdapter) MarshalToSizedBuffer(buf []byte) (n int, err error) {
	ls := (*labels.Label)(bs)
	i := len(buf)
	if len(ls.Value) > 0 {
		i -= len(ls.Value)
		copy(buf[i:], ls.Value)
		i = encodeVarintCortex(buf, i, uint64(len(ls.Value)))
		i--
		buf[i] = 0x12 // field 2 (Value), wire type 2 (length-delimited).
	}
	if len(ls.Name) > 0 {
		i -= len(ls.Name)
		copy(buf[i:], ls.Name)
		i = encodeVarintCortex(buf, i, uint64(len(ls.Name)))
		i--
		buf[i] = 0xa // field 1 (Name), wire type 2 (length-delimited).
	}
	return len(buf) - i, nil
}

// Unmarshal a LabelAdapter, implements proto.Unmarshaller.
// NB this is a copy of the autogenerated code to unmarshal a LabelPair,
// with the byte copying replaced with a yoloString.
func (bs *LabelAdapter) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowCortex
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: LabelPair: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: LabelPair: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Name (length-delimited string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var byteLen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCortex
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				byteLen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if byteLen < 0 {
				return ErrInvalidLengthCortex
			}
			postIndex := iNdEx + byteLen
			if postIndex < 0 {
				return ErrInvalidLengthCortex
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// yoloString aliases dAtA rather than copying; bs.Name is only
			// valid while the input buffer is alive and unmodified.
			bs.Name = yoloString(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// Field 2: Value (length-delimited string).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
			}
			var byteLen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowCortex
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				byteLen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if byteLen < 0 {
				return ErrInvalidLengthCortex
			}
			postIndex := iNdEx + byteLen
			if postIndex < 0 {
				return ErrInvalidLengthCortex
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Same zero-copy aliasing as Name above.
			bs.Value = yoloString(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// Unknown field: skip it wholesale.
			iNdEx = preIndex
			skippy, err := skipCortex(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthCortex
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthCortex
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// yoloString reinterprets buf as a string without copying. The result
// aliases buf's bytes, so it must not outlive (or observe mutation of)
// the backing slice.
func yoloString(buf []byte) string {
	return *((*string)(unsafe.Pointer(&buf)))
}

// Size implements proto.Sizer.
func (bs *LabelAdapter) Size() (n int) {
	// NOTE: the pointer conversion on a nil bs is safe (no dereference
	// happens before the nil check below).
	ls := (*labels.Label)(bs)
	if bs == nil {
		return 0
	}
	var l int
	_ = l
	// Each present field costs 1 tag byte + varint length prefix + payload.
	l = len(ls.Name)
	if l > 0 {
		n += 1 + l + sovCortex(uint64(l))
	}
	l = len(ls.Value)
	if l > 0 {
		n += 1 + l + sovCortex(uint64(l))
	}
	return n
}

// Equal implements proto.Equaler.
func (bs *LabelAdapter) Equal(other LabelAdapter) bool {
	return bs.Name == other.Name && bs.Value == other.Value
}

// Compare implements proto.Comparer.
266 func (bs *LabelAdapter) Compare(other LabelAdapter) int { 267 if c := strings.Compare(bs.Name, other.Name); c != 0 { 268 return c 269 } 270 return strings.Compare(bs.Value, other.Value) 271 } 272 273 // PreallocTimeseriesSliceFromPool retrieves a slice of PreallocTimeseries from a sync.Pool. 274 // ReuseSlice should be called once done. 275 func PreallocTimeseriesSliceFromPool() []PreallocTimeseries { 276 return slicePool.Get().([]PreallocTimeseries) 277 } 278 279 // TimeseriesFromPool retrieves a pointer to a TimeSeries from a sync.Pool. 280 // ReuseTimeseries should be called once done, unless ReuseSlice was called on the slice that contains this TimeSeries. 281 func TimeseriesFromPool() *TimeSeries { 282 return timeSeriesPool.Get().(*TimeSeries) 283 }