github.com/neatlab/neatio@v1.7.3-0.20220425043230-d903e92fcc75/utilities/metrics/influxdb/influxdb.go

// Package influxdb periodically exports the metrics held in a
// metrics.Registry to an InfluxDB v1 server over its HTTP API, using the
// legacy github.com/influxdata/influxdb/client package.
package influxdb

import (
	"fmt"
	"log"
	uurl "net/url"
	"time"

	"github.com/influxdata/influxdb/client"
	"github.com/neatlab/neatio/utilities/metrics"
)

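// reporter holds the state of one export loop: the registry to read from,
// the InfluxDB connection settings, and a cache of the last reported counter
// values so that counters can be written as deltas between flushes.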
type reporter struct {
	reg      metrics.Registry
	interval time.Duration

	url       uurl.URL
	database  string
	username  string
	password  string
	namespace string
	tags      map[string]string

	client *client.Client

	cache map[string]int64
}

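// InfluxDB starts a reporter that flushes every metric in the registry r to
// the given InfluxDB v1 endpoint once per interval d, prefixing measurement
// names with namespace. The call blocks for the lifetime of the process, so
// it is normally launched in its own goroutine. A minimal sketch, assuming
// the metrics package exposes a DefaultRegistry and using placeholder
// connection details:
//
//	go influxdb.InfluxDB(metrics.DefaultRegistry, 10*time.Second,
//		"http://127.0.0.1:8086", "neatio", "user", "password", "neatio.")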
func InfluxDB(r metrics.Registry, d time.Duration, url, database, username, password, namespace string) {
	InfluxDBWithTags(r, d, url, database, username, password, namespace, nil)
}

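// InfluxDBWithTags behaves like InfluxDB but also attaches the given tags to
// every point it writes. It parses the URL and builds the client up front;
// on failure it logs the error and returns without starting the report loop.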
func InfluxDBWithTags(r metrics.Registry, d time.Duration, url, database, username, password, namespace string, tags map[string]string) {
	u, err := uurl.Parse(url)
	if err != nil {
		log.Printf("unable to parse InfluxDB url %s. err=%v", url, err)
		return
	}

	rep := &reporter{
		reg:       r,
		interval:  d,
		url:       *u,
		database:  database,
		username:  username,
		password:  password,
		namespace: namespace,
		tags:      tags,
		cache:     make(map[string]int64),
	}
	if err := rep.makeClient(); err != nil {
		log.Printf("unable to make InfluxDB client. err=%v", err)
		return
	}

	rep.run()
}

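// makeClient (re)creates the underlying InfluxDB client from the stored URL
// and credentials. It is called once at start-up and again whenever a failed
// ping suggests the connection has gone stale.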
func (r *reporter) makeClient() (err error) {
	r.client, err = client.NewClient(client.Config{
		URL:      r.url,
		Username: r.username,
		Password: r.password,
	})

	return
}

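// run is the reporting loop: it flushes the registry every r.interval and
// pings the server every five seconds, rebuilding the client whenever a ping
// fails. It never returns, so the tickers created with time.Tick are simply
// held for the lifetime of the process.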
func (r *reporter) run() {
	intervalTicker := time.Tick(r.interval)
	pingTicker := time.Tick(time.Second * 5)

	for {
		select {
		case <-intervalTicker:
			if err := r.send(); err != nil {
				log.Printf("unable to send to InfluxDB. err=%v", err)
			}
		case <-pingTicker:
			_, _, err := r.client.Ping()
			if err != nil {
				log.Printf("got error while sending a ping to InfluxDB, trying to recreate client. err=%v", err)

				if err = r.makeClient(); err != nil {
					log.Printf("unable to make InfluxDB client. err=%v", err)
				}
			}
		}
	}
}

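// send walks the registry once and turns every metric into one point:
// counters become "<namespace><name>.count" holding the delta since the last
// flush, gauges ".gauge", histograms ".histogram", meters ".meter", timers
// ".timer" and resetting timers ".span". The collected points are then
// written to r.database in a single batch.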
func (r *reporter) send() error {
	var pts []client.Point

	r.reg.Each(func(name string, i interface{}) {
		now := time.Now()
		namespace := r.namespace

		switch metric := i.(type) {
		case metrics.Counter:
			// Report the increase since the previous flush and remember the
			// current value for the next one.
			v := metric.Count()
			l := r.cache[name]
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.count", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": v - l,
				},
				Time: now,
			})
			r.cache[name] = v
		case metrics.Gauge:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": ms.Value(),
				},
				Time: now,
			})
		case metrics.GaugeFloat64:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": ms.Value(),
				},
				Time: now,
			})
		case metrics.Histogram:
			ms := metric.Snapshot()
			ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.histogram", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count":    ms.Count(),
					"max":      ms.Max(),
					"mean":     ms.Mean(),
					"min":      ms.Min(),
					"stddev":   ms.StdDev(),
					"variance": ms.Variance(),
					"p50":      ps[0],
					"p75":      ps[1],
					"p95":      ps[2],
					"p99":      ps[3],
					"p999":     ps[4],
					"p9999":    ps[5],
				},
				Time: now,
			})
		case metrics.Meter:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.meter", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count": ms.Count(),
					"m1":    ms.Rate1(),
					"m5":    ms.Rate5(),
					"m15":   ms.Rate15(),
					"mean":  ms.RateMean(),
				},
				Time: now,
			})
		case metrics.Timer:
			ms := metric.Snapshot()
			ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.timer", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count":    ms.Count(),
					"max":      ms.Max(),
					"mean":     ms.Mean(),
					"min":      ms.Min(),
					"stddev":   ms.StdDev(),
					"variance": ms.Variance(),
					"p50":      ps[0],
					"p75":      ps[1],
					"p95":      ps[2],
					"p99":      ps[3],
					"p999":     ps[4],
					"p9999":    ps[5],
					"m1":       ms.Rate1(),
					"m5":       ms.Rate5(),
					"m15":      ms.Rate15(),
					"meanrate": ms.RateMean(),
				},
				Time: now,
			})
		case metrics.ResettingTimer:
			t := metric.Snapshot()

			if len(t.Values()) > 0 {
				// Taking min and max straight from the values relies on the
				// Percentiles call having sorted the snapshot's values.
				ps := t.Percentiles([]float64{50, 95, 99})
				val := t.Values()
				pts = append(pts, client.Point{
					Measurement: fmt.Sprintf("%s%s.span", namespace, name),
					Tags:        r.tags,
					Fields: map[string]interface{}{
						"count": len(val),
						"max":   val[len(val)-1],
						"mean":  t.Mean(),
						"min":   val[0],
						"p50":   ps[0],
						"p95":   ps[1],
						"p99":   ps[2],
					},
					Time: now,
				})
			}
		}
	})

	bps := client.BatchPoints{
		Points:   pts,
		Database: r.database,
	}

	_, err := r.client.Write(bps)
	return err
}