github.com/grafana/pyroscope@v1.18.0/pkg/operations/v2/profile_handlers.go (about)

     1  package v2
     2  
import (
	"bytes"
	"compress/gzip"
	"context"
	"fmt"
	"net/http"
	"slices"
	"strconv"
	"strings"
	"time"

	"github.com/parquet-go/parquet-go"
	"github.com/pkg/errors"

	googlev1 "github.com/grafana/pyroscope/api/gen/proto/go/google/v1"
	metastorev1 "github.com/grafana/pyroscope/api/gen/proto/go/metastore/v1"
	"github.com/grafana/pyroscope/pkg/block"
	"github.com/grafana/pyroscope/pkg/frontend/dot/measurement"
	phlaremodel "github.com/grafana/pyroscope/pkg/model"
	schemav1 "github.com/grafana/pyroscope/pkg/phlaredb/schemas/v1"
	"github.com/grafana/pyroscope/pkg/phlaredb/symdb"
	"github.com/grafana/pyroscope/pkg/pprof"
	httputil "github.com/grafana/pyroscope/pkg/util/http"
)
    26  
    27  func (h *Handlers) CreateDatasetProfilesHandler() func(http.ResponseWriter, *http.Request) {
    28  	return func(w http.ResponseWriter, r *http.Request) {
    29  		req, err := parseDatasetRequest(r)
    30  		if err != nil {
    31  			httputil.Error(w, err)
    32  			return
    33  		}
    34  
    35  		page := 1
    36  		if pageStr := r.URL.Query().Get("page"); pageStr != "" {
    37  			if _, err := fmt.Sscanf(pageStr, "%d", &page); err != nil || page < 1 {
    38  				page = 1
    39  			}
    40  		}
    41  
    42  		pageSize := 100
    43  		if pageSizeStr := r.URL.Query().Get("page_size"); pageSizeStr != "" {
    44  			if _, err := fmt.Sscanf(pageSizeStr, "%d", &pageSize); err != nil || pageSize < 1 || pageSize > 500 {
    45  				pageSize = 100
    46  			}
    47  		}
    48  
    49  		blockMeta, foundDataset, err := h.getDatasetMetadata(r.Context(), req)
    50  		if err != nil {
    51  			httputil.Error(w, err)
    52  			return
    53  		}
    54  
    55  		dataset := h.convertDataset(foundDataset, blockMeta.StringTable)
    56  
    57  		profiles, totalCount, err := h.readProfilesFromDataset(r.Context(), blockMeta, foundDataset, page, pageSize)
    58  		if err != nil {
    59  			httputil.Error(w, errors.Wrap(err, "failed to read profiles from dataset"))
    60  			return
    61  		}
    62  
    63  		totalPages := (totalCount + pageSize - 1) / pageSize
    64  		if totalPages == 0 {
    65  			totalPages = 1
    66  		}
    67  
    68  		err = pageTemplates.datasetProfilesTemplate.Execute(w, datasetProfilesPageContent{
    69  			User:        req.TenantID,
    70  			BlockID:     req.BlockID,
    71  			Shard:       req.Shard,
    72  			BlockTenant: req.BlockTenant,
    73  			Dataset:     &dataset,
    74  			Profiles:    profiles,
    75  			TotalCount:  totalCount,
    76  			Page:        page,
    77  			PageSize:    pageSize,
    78  			TotalPages:  totalPages,
    79  			HasPrevPage: page > 1,
    80  			HasNextPage: page < totalPages,
    81  			Now:         time.Now().UTC().Format(time.RFC3339),
    82  		})
    83  		if err != nil {
    84  			httputil.Error(w, err)
    85  			return
    86  		}
    87  	}
    88  }
    89  
    90  func (h *Handlers) readProfilesFromDataset(ctx context.Context, blockMeta *metastorev1.BlockMeta, dataset *metastorev1.Dataset, page, pageSize int) ([]profileInfo, int, error) {
    91  	obj := block.NewObject(h.Bucket, blockMeta)
    92  	if err := obj.Open(ctx); err != nil {
    93  		return nil, 0, fmt.Errorf("failed to open block object: %w", err)
    94  	}
    95  	defer obj.Close()
    96  
    97  	ds := block.NewDataset(dataset, obj)
    98  	if err := ds.Open(ctx, block.SectionProfiles, block.SectionTSDB); err != nil {
    99  		return nil, 0, fmt.Errorf("failed to open dataset: %w", err)
   100  	}
   101  	defer ds.Close()
   102  
   103  	it, err := block.NewProfileRowIterator(ds)
   104  	if err != nil {
   105  		return nil, 0, fmt.Errorf("failed to create profile iterator: %w", err)
   106  	}
   107  	defer it.Close()
   108  
   109  	var profiles []profileInfo
   110  	rowNumber := 0
   111  	totalCount := 0
   112  	startRow := (page - 1) * pageSize
   113  	endRow := startRow + pageSize
   114  
   115  	for it.Next() {
   116  		if rowNumber >= startRow && rowNumber < endRow {
   117  			entry := it.At()
   118  
   119  			profileType := entry.Labels.Get(phlaremodel.LabelNameProfileType)
   120  
   121  			var sampleCount int
   122  			entry.Row.ForStacktraceIdsAndValues(func(sids []parquet.Value, vals []parquet.Value) {
   123  				sampleCount = len(sids)
   124  			})
   125  
   126  			profiles = append(profiles, profileInfo{
   127  				RowNumber:   rowNumber,
   128  				Timestamp:   time.Unix(0, entry.Timestamp).UTC().Format(time.RFC3339),
   129  				SeriesIndex: entry.Row.SeriesIndex(),
   130  				ProfileType: profileType,
   131  				SampleCount: sampleCount,
   132  			})
   133  		}
   134  		rowNumber++
   135  		totalCount++
   136  	}
   137  
   138  	if err := it.Err(); err != nil {
   139  		return nil, 0, fmt.Errorf("error iterating profiles: %w", err)
   140  	}
   141  
   142  	return profiles, totalCount, nil
   143  }
   144  
   145  func (h *Handlers) CreateDatasetProfileDownloadHandler() func(http.ResponseWriter, *http.Request) {
   146  	return func(w http.ResponseWriter, r *http.Request) {
   147  		req, err := parseDatasetRequest(r)
   148  		if err != nil {
   149  			httputil.Error(w, err)
   150  			return
   151  		}
   152  
   153  		rowStr := r.URL.Query().Get("row")
   154  		if rowStr == "" {
   155  			httputil.Error(w, errors.New("No row number provided"))
   156  			return
   157  		}
   158  		var rowNum int64
   159  		if _, err := fmt.Sscanf(rowStr, "%d", &rowNum); err != nil {
   160  			httputil.Error(w, errors.Wrap(err, "invalid row parameter"))
   161  			return
   162  		}
   163  
   164  		blockMeta, foundDataset, err := h.getDatasetMetadata(r.Context(), req)
   165  		if err != nil {
   166  			httputil.Error(w, err)
   167  			return
   168  		}
   169  
   170  		_, _, profileMeta, err := h.buildProfileResolver(r.Context(), blockMeta, foundDataset, rowNum)
   171  		if err != nil {
   172  			httputil.Error(w, errors.Wrap(err, "failed to get profile metadata"))
   173  			return
   174  		}
   175  
   176  		profile, err := h.retrieveProfile(r.Context(), blockMeta, foundDataset, rowNum)
   177  		if err != nil {
   178  			httputil.Error(w, errors.Wrap(err, "failed to download profile"))
   179  			return
   180  		}
   181  
   182  		sanitizedDataset := strings.ReplaceAll(req.DatasetName, "/", "_")
   183  		sanitizedProfileType := strings.ReplaceAll(profileMeta.ProfileType, ":", "_")
   184  		sanitizedProfileType = strings.ReplaceAll(sanitizedProfileType, "/", "_")
   185  
   186  		timestampStr := time.Unix(0, profile.TimeNanos).UTC().Format("20060102-150405")
   187  		filename := fmt.Sprintf("%s-%s-%s.pb.gz", sanitizedDataset, sanitizedProfileType, timestampStr)
   188  		h.writeProfile(w, profile, filename)
   189  	}
   190  }
   191  
   192  func (h *Handlers) retrieveProfile(
   193  	ctx context.Context,
   194  	blockMeta *metastorev1.BlockMeta,
   195  	dataset *metastorev1.Dataset,
   196  	rowNum int64,
   197  ) (*googlev1.Profile, error) {
   198  	resolver, timestamp, meta, err := h.buildProfileResolver(ctx, blockMeta, dataset, rowNum)
   199  	if err != nil {
   200  		return nil, fmt.Errorf("failed to build profile resolver: %w", err)
   201  	}
   202  	defer resolver.Release()
   203  
   204  	profile, err := resolver.Pprof()
   205  	if err != nil {
   206  		return nil, fmt.Errorf("failed to build pprof profile: %w", err)
   207  	}
   208  
   209  	if t, err := phlaremodel.ParseProfileTypeSelector(meta.ProfileType); err == nil {
   210  		pprof.SetProfileMetadata(profile, t, timestamp, 0)
   211  	}
   212  
   213  	return profile, nil
   214  }
   215  
   216  func (h *Handlers) writeProfile(w http.ResponseWriter, profile *googlev1.Profile, filename string) {
   217  	data, err := profile.MarshalVT()
   218  	if err != nil {
   219  		httputil.Error(w, errors.Wrap(err, "failed to marshal profile"))
   220  		return
   221  	}
   222  
   223  	var buf bytes.Buffer
   224  	gzipWriter := gzip.NewWriter(&buf)
   225  	if _, err := gzipWriter.Write(data); err != nil {
   226  		httputil.Error(w, errors.Wrap(err, "failed to compress profile"))
   227  		return
   228  	}
   229  	if err := gzipWriter.Close(); err != nil {
   230  		httputil.Error(w, errors.Wrap(err, "failed to close gzip writer"))
   231  		return
   232  	}
   233  
   234  	w.Header().Set("Content-Type", "application/gzip")
   235  	w.Header().Set("Content-Encoding", "gzip")
   236  	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
   237  
   238  	if _, err := w.Write(buf.Bytes()); err != nil {
   239  		httputil.Error(w, errors.Wrap(err, "failed to write profile"))
   240  		return
   241  	}
   242  }
   243  
   244  func (h *Handlers) CreateDatasetProfileCallTreeHandler() func(http.ResponseWriter, *http.Request) {
   245  	return func(w http.ResponseWriter, r *http.Request) {
   246  		req, err := parseDatasetRequest(r)
   247  		if err != nil {
   248  			httputil.Error(w, err)
   249  			return
   250  		}
   251  
   252  		rowStr := r.URL.Query().Get("row")
   253  		if rowStr == "" {
   254  			httputil.Error(w, errors.New("No row number provided"))
   255  			return
   256  		}
   257  		var rowNum int64
   258  		if _, err := fmt.Sscanf(rowStr, "%d", &rowNum); err != nil {
   259  			httputil.Error(w, errors.Wrap(err, "invalid row parameter"))
   260  			return
   261  		}
   262  
   263  		blockMeta, foundDataset, err := h.getDatasetMetadata(r.Context(), req)
   264  		if err != nil {
   265  			httputil.Error(w, err)
   266  			return
   267  		}
   268  
   269  		dataset := h.convertDataset(foundDataset, blockMeta.StringTable)
   270  
   271  		tree, timestamp, profileMeta, err := h.buildProfileTree(r.Context(), blockMeta, foundDataset, rowNum)
   272  		if err != nil {
   273  			httputil.Error(w, errors.Wrap(err, "failed to build profile tree"))
   274  			return
   275  		}
   276  
   277  		err = pageTemplates.profileCallTreeTemplate.Execute(w, profileCallTreePageContent{
   278  			User:        req.TenantID,
   279  			BlockID:     req.BlockID,
   280  			Shard:       req.Shard,
   281  			BlockTenant: req.BlockTenant,
   282  			Dataset:     &dataset,
   283  			Timestamp:   time.Unix(0, timestamp).UTC().Format(time.RFC3339),
   284  			ProfileInfo: profileMeta,
   285  			Tree:        tree,
   286  			Now:         time.Now().UTC().Format(time.RFC3339),
   287  		})
   288  		if err != nil {
   289  			httputil.Error(w, err)
   290  			return
   291  		}
   292  	}
   293  }
   294  
   295  func (h *Handlers) buildProfileTree(
   296  	ctx context.Context,
   297  	blockMeta *metastorev1.BlockMeta,
   298  	dataset *metastorev1.Dataset,
   299  	rowNum int64,
   300  ) (*treeNode, int64, *profileMetadata, error) {
   301  	resolver, timestamp, profileMeta, err := h.buildProfileResolver(ctx, blockMeta, dataset, rowNum)
   302  	if err != nil {
   303  		return nil, 0, nil, fmt.Errorf("failed to build profile resolver: %w", err)
   304  	}
   305  	defer resolver.Release()
   306  
   307  	profile, err := resolver.Pprof()
   308  	if err != nil {
   309  		return nil, 0, nil, fmt.Errorf("failed to build pprof profile: %w", err)
   310  	}
   311  
   312  	tree := buildTreeFromPprof(profile, profileMeta.Unit)
   313  
   314  	return tree, timestamp, profileMeta, nil
   315  }
   316  
   317  func (h *Handlers) buildProfileResolver(
   318  	ctx context.Context,
   319  	blockMeta *metastorev1.BlockMeta,
   320  	dataset *metastorev1.Dataset,
   321  	rowNum int64,
   322  ) (*symdb.Resolver, int64, *profileMetadata, error) {
   323  	obj := block.NewObject(h.Bucket, blockMeta)
   324  	if err := obj.Open(ctx); err != nil {
   325  		return nil, 0, nil, fmt.Errorf("failed to open block object: %w", err)
   326  	}
   327  	defer obj.Close()
   328  
   329  	ds := block.NewDataset(dataset, obj)
   330  	if err := ds.Open(ctx, block.SectionProfiles, block.SectionTSDB, block.SectionSymbols); err != nil {
   331  		return nil, 0, nil, fmt.Errorf("failed to open dataset: %w", err)
   332  	}
   333  	defer ds.Close()
   334  
   335  	it, err := block.NewProfileRowIterator(ds)
   336  	if err != nil {
   337  		return nil, 0, nil, fmt.Errorf("failed to create profile iterator: %w", err)
   338  	}
   339  	defer it.Close()
   340  
   341  	var currentRow int64
   342  	var targetEntry block.ProfileEntry
   343  	found := false
   344  
   345  	for it.Next() {
   346  		if currentRow == rowNum {
   347  			targetEntry = it.At()
   348  			found = true
   349  			break
   350  		}
   351  		currentRow++
   352  	}
   353  
   354  	if err := it.Err(); err != nil {
   355  		return nil, 0, nil, fmt.Errorf("error iterating profiles: %w", err)
   356  	}
   357  
   358  	if !found {
   359  		return nil, 0, nil, fmt.Errorf("profile row %d not found", rowNum)
   360  	}
   361  
   362  	var labelPairs []labelPair
   363  	for _, label := range targetEntry.Labels {
   364  		labelPairs = append(labelPairs, labelPair{
   365  			Key:   label.Name,
   366  			Value: label.Value,
   367  		})
   368  	}
   369  
   370  	var sampleCount int
   371  	targetEntry.Row.ForStacktraceIdsAndValues(func(sids []parquet.Value, vals []parquet.Value) {
   372  		sampleCount = len(sids)
   373  	})
   374  
   375  	profileMeta := &profileMetadata{
   376  		Labels:      labelPairs,
   377  		SampleCount: sampleCount,
   378  		Unit:        targetEntry.Labels.Get(phlaremodel.LabelNameUnit),
   379  		ProfileType: targetEntry.Labels.Get(phlaremodel.LabelNameProfileType),
   380  	}
   381  
   382  	resolver := symdb.NewResolver(ctx, ds.Symbols())
   383  
   384  	partitionID := targetEntry.Row.StacktracePartitionID()
   385  	var stacktraceIDs []uint32
   386  	var values []uint64
   387  
   388  	targetEntry.Row.ForStacktraceIdsAndValues(func(sids []parquet.Value, vals []parquet.Value) {
   389  		stacktraceIDs = make([]uint32, len(sids))
   390  		values = make([]uint64, len(vals))
   391  		for i, sid := range sids {
   392  			stacktraceIDs[i] = sid.Uint32()
   393  		}
   394  		for i, val := range vals {
   395  			values[i] = uint64(val.Int64())
   396  		}
   397  	})
   398  
   399  	samples := schemav1.Samples{
   400  		StacktraceIDs: stacktraceIDs,
   401  		Values:        values,
   402  	}
   403  	resolver.AddSamples(partitionID, samples)
   404  
   405  	return resolver, targetEntry.Timestamp, profileMeta, nil
   406  }
   407  
   408  // formatValue formats a value according to the pprof unit specification
   409  func formatValue(value uint64, unit string) string {
   410  	scaledValue, scaledUnit := measurement.Scale(int64(value), unit, "auto")
   411  	formattedValue := strings.TrimSuffix(fmt.Sprintf("%.2f", scaledValue), ".00")
   412  	if scaledUnit == "" {
   413  		return formattedValue
   414  	}
   415  	return fmt.Sprintf("%s %s", formattedValue, scaledUnit)
   416  }
   417  
// buildTreeFromPprof converts a flat pprof profile into a call tree rooted at
// a synthetic "root" node. Each node aggregates the first sample value
// (sample.Value[0]) of every sample whose stack passes through it, so values
// are cumulative from leaf to root. Returns nil when the profile is nil, has
// no samples, or the grand total of first values is zero.
func buildTreeFromPprof(profile *googlev1.Profile, unit string) *treeNode {
	if profile == nil || len(profile.Sample) == 0 {
		return nil
	}

	// Sum the first value of every sample; this is the 100% reference for
	// percentages computed later in sortAndCalculatePercents.
	// NOTE(review): negative first values would wrap via the uint64
	// conversion — presumably values here are non-negative; confirm.
	var grandTotal uint64
	for _, sample := range profile.Sample {
		if len(sample.Value) > 0 {
			grandTotal += uint64(sample.Value[0])
		}
	}

	if grandTotal == 0 {
		return nil
	}

	// Index functions and locations by ID for O(1) lookups while walking stacks.
	functionMap := make(map[uint64]*googlev1.Function)
	for _, fn := range profile.Function {
		functionMap[fn.Id] = fn
	}

	locationMap := make(map[uint64]*googlev1.Location)
	for _, loc := range profile.Location {
		locationMap[loc.Id] = loc
	}

	root := &treeNode{
		Name:           "root",
		Value:          grandTotal,
		Percent:        100.0,
		Location:       "",
		FormattedValue: formatValue(grandTotal, unit),
		Children:       make([]*treeNode, 0),
	}

	// nodeMap deduplicates tree nodes by their path key ("/name@funcID"
	// segments from the root), so identical call paths from different samples
	// merge into one node. The empty path is the root itself.
	nodeMap := make(map[string]*treeNode)
	nodeMap[""] = root

	for _, sample := range profile.Sample {
		if len(sample.Value) == 0 || len(sample.LocationId) == 0 {
			continue
		}

		value := uint64(sample.Value[0])
		currentPath := ""
		currentNode := root

		// In pprof, LocationId[0] is the leaf frame; iterate from the last
		// element so the tree is built from the outermost caller downward.
		for i := len(sample.LocationId) - 1; i >= 0; i-- {
			locID := sample.LocationId[i]
			location := locationMap[locID]
			if location == nil {
				continue
			}

			// Only the first line of a multi-line (inlined) location is used;
			// frames without line info are skipped entirely.
			if len(location.Line) == 0 {
				continue
			}

			line := location.Line[0]
			function := functionMap[line.FunctionId]
			if function == nil {
				continue
			}

			funcName := profile.StringTable[function.Name]
			fileName := profile.StringTable[function.Filename]
			lineNum := line.Line

			// Shorten the file path for display: keep everything from "pkg/",
			// strip through "src/", or fall back to the bare file name.
			var locationStr string
			if fileName != "" && lineNum > 0 {
				filePath := fileName
				if pkgIdx := strings.Index(filePath, "/pkg/"); pkgIdx != -1 {
					filePath = filePath[pkgIdx+1:]
				} else if srcIdx := strings.Index(filePath, "/src/"); srcIdx != -1 {
					filePath = filePath[srcIdx+5:]
				} else if pathIdx := strings.LastIndex(filePath, "/"); pathIdx != -1 {
					filePath = filePath[pathIdx+1:]
				}
				locationStr = fmt.Sprintf("%s:L%d", filePath, lineNum)
			}

			// Key includes the function ID so same-named functions from
			// different symbols remain distinct nodes.
			parentPath := currentPath
			currentPath = fmt.Sprintf("%s/%s@%d", parentPath, funcName, line.FunctionId)

			node, exists := nodeMap[currentPath]
			if !exists {
				node = &treeNode{
					Name:           funcName,
					Percent:        0,
					Location:       locationStr,
					FormattedValue: formatValue(0, unit),
					Children:       make([]*treeNode, 0),
				}
				nodeMap[currentPath] = node
				currentNode.Children = append(currentNode.Children, node)
			}

			// Cumulative: every frame on this sample's stack receives the
			// sample's full value.
			node.Value += value

			currentNode = node
		}
	}

	// Percent and FormattedValue are finalized (and children sorted) here.
	sortAndCalculatePercents(root, float64(grandTotal), unit)

	return root
}
   525  
   526  func sortAndCalculatePercents(node *treeNode, grandTotal float64, unit string) {
   527  	if grandTotal > 0 {
   528  		node.Percent = (float64(node.Value) / grandTotal) * 100.0
   529  	}
   530  
   531  	node.FormattedValue = formatValue(node.Value, unit)
   532  
   533  	if len(node.Children) > 0 {
   534  		slices.SortFunc(node.Children, func(a, b *treeNode) int {
   535  			if a.Value > b.Value {
   536  				return -1
   537  			}
   538  			if a.Value < b.Value {
   539  				return 1
   540  			}
   541  			return 0
   542  		})
   543  
   544  		for _, child := range node.Children {
   545  			sortAndCalculatePercents(child, grandTotal, unit)
   546  		}
   547  	}
   548  }