github.com/zbc163001/genaro-core@v0.0.0-20190527213236-76f442bd51f4/metrics/influxdb/influxdb.go

package influxdb

import (
	"fmt"
	"log"
	uurl "net/url"
	"time"

	"github.com/GenaroNetwork/Genaro-Core/metrics"
	"github.com/influxdata/influxdb/client"
)

// reporter periodically pushes a snapshot of every metric in the registry to InfluxDB.
type reporter struct {
	reg      metrics.Registry
	interval time.Duration

	url       uurl.URL
	database  string
	username  string
	password  string
	namespace string
	tags      map[string]string

	client *client.Client

	// cache holds the last reported value of each counter so that only the
	// delta since the previous report is written.
	cache map[string]int64
}

// InfluxDB starts an InfluxDB reporter which posts the metrics from the given
// metrics.Registry at each interval d.
func InfluxDB(r metrics.Registry, d time.Duration, url, database, username, password, namespace string) {
	InfluxDBWithTags(r, d, url, database, username, password, namespace, nil)
}

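// Illustrative usage sketch (not part of the original source): InfluxDB blocks
// in the reporter's run loop, so a caller would typically start it in its own
// goroutine. The registry, endpoint, database, credentials and namespace below
// are placeholders.
//
//	go influxdb.InfluxDB(
//		metrics.DefaultRegistry,  // registry whose metrics are exported
//		10*time.Second,           // reporting interval d
//		"http://localhost:8086",  // InfluxDB HTTP endpoint
//		"metrics",                // target database
//		"user", "password",       // credentials (may be empty strings)
//		"node/",                  // prefix prepended to every measurement name
//	)
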
// InfluxDBWithTags starts an InfluxDB reporter which posts the metrics from the
// given metrics.Registry at each interval d, attaching the specified tags to
// every point.
func InfluxDBWithTags(r metrics.Registry, d time.Duration, url, database, username, password, namespace string, tags map[string]string) {
	u, err := uurl.Parse(url)
	if err != nil {
		log.Printf("unable to parse InfluxDB url %s. err=%v", url, err)
		return
	}

	rep := &reporter{
		reg:       r,
		interval:  d,
		url:       *u,
		database:  database,
		username:  username,
		password:  password,
		namespace: namespace,
		tags:      tags,
		cache:     make(map[string]int64),
	}
	if err := rep.makeClient(); err != nil {
		log.Printf("unable to make InfluxDB client. err=%v", err)
		return
	}

	rep.run()
}

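// Illustrative usage sketch with tags (not part of the original source); the
// tag keys and values are placeholders and end up on every reported point:
//
//	tags := map[string]string{"host": "node-1", "network": "testnet"}
//	go influxdb.InfluxDBWithTags(
//		metrics.DefaultRegistry, 10*time.Second,
//		"http://localhost:8086", "metrics", "user", "password", "node/",
//		tags,
//	)
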
// makeClient (re)creates the InfluxDB client from the reporter's connection
// settings; run also uses it to rebuild the client after a failed ping.
func (r *reporter) makeClient() (err error) {
	r.client, err = client.NewClient(client.Config{
		URL:      r.url,
		Username: r.username,
		Password: r.password,
	})

	return
}

// run loops forever, sending a report on every interval tick and pinging
// InfluxDB every five seconds so that a broken client can be recreated.
func (r *reporter) run() {
	intervalTicker := time.Tick(r.interval)
	pingTicker := time.Tick(time.Second * 5)

	for {
		select {
		case <-intervalTicker:
			if err := r.send(); err != nil {
				log.Printf("unable to send to InfluxDB. err=%v", err)
			}
		case <-pingTicker:
			_, _, err := r.client.Ping()
			if err != nil {
				log.Printf("got error while sending a ping to InfluxDB, trying to recreate client. err=%v", err)

				if err = r.makeClient(); err != nil {
					log.Printf("unable to make InfluxDB client. err=%v", err)
				}
			}
		}
	}
}

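// send, below, converts one snapshot of every registered metric into an
// InfluxDB point whose measurement name is "<namespace><metric name>.<kind>".
// For example (illustrative names only), a Counter registered as
// "system/memory/allocs" with namespace "node/" is written to the measurement
// "node/system/memory/allocs.count" with a single field "value" holding the
// delta since the previous report.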
func (r *reporter) send() error {
	var pts []client.Point

	r.reg.Each(func(name string, i interface{}) {
		now := time.Now()
		namespace := r.namespace

		switch metric := i.(type) {
		case metrics.Counter:
			// Report the delta since the last send and remember the new total.
			v := metric.Count()
			l := r.cache[name]
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.count", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": v - l,
				},
				Time: now,
			})
			r.cache[name] = v
		case metrics.Gauge:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": ms.Value(),
				},
				Time: now,
			})
		case metrics.GaugeFloat64:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.gauge", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"value": ms.Value(),
				},
				Time: now,
			})
		case metrics.Histogram:
			ms := metric.Snapshot()
			ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.histogram", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count":    ms.Count(),
					"max":      ms.Max(),
					"mean":     ms.Mean(),
					"min":      ms.Min(),
					"stddev":   ms.StdDev(),
					"variance": ms.Variance(),
					"p50":      ps[0],
					"p75":      ps[1],
					"p95":      ps[2],
					"p99":      ps[3],
					"p999":     ps[4],
					"p9999":    ps[5],
				},
				Time: now,
			})
		case metrics.Meter:
			ms := metric.Snapshot()
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.meter", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count": ms.Count(),
					"m1":    ms.Rate1(),
					"m5":    ms.Rate5(),
					"m15":   ms.Rate15(),
					"mean":  ms.RateMean(),
				},
				Time: now,
			})
		case metrics.Timer:
			ms := metric.Snapshot()
			ps := ms.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999, 0.9999})
			pts = append(pts, client.Point{
				Measurement: fmt.Sprintf("%s%s.timer", namespace, name),
				Tags:        r.tags,
				Fields: map[string]interface{}{
					"count":    ms.Count(),
					"max":      ms.Max(),
					"mean":     ms.Mean(),
					"min":      ms.Min(),
					"stddev":   ms.StdDev(),
					"variance": ms.Variance(),
					"p50":      ps[0],
					"p75":      ps[1],
					"p95":      ps[2],
					"p99":      ps[3],
					"p999":     ps[4],
					"p9999":    ps[5],
					"m1":       ms.Rate1(),
					"m5":       ms.Rate5(),
					"m15":      ms.Rate15(),
					"meanrate": ms.RateMean(),
				},
				Time: now,
			})
		case metrics.ResettingTimer:
			t := metric.Snapshot()

			// Only report a span if the timer recorded any values since the last reset.
			if len(t.Values()) > 0 {
				ps := t.Percentiles([]float64{50, 95, 99})
				val := t.Values()
				pts = append(pts, client.Point{
					Measurement: fmt.Sprintf("%s%s.span", namespace, name),
					Tags:        r.tags,
					Fields: map[string]interface{}{
						"count": len(val),
						"max":   val[len(val)-1],
						"mean":  t.Mean(),
						"min":   val[0],
						"p50":   ps[0],
						"p95":   ps[1],
						"p99":   ps[2],
					},
					Time: now,
				})
			}
		}
	})

	// Write all collected points to the configured database in a single batch.
	bps := client.BatchPoints{
		Points:   pts,
		Database: r.database,
	}

	_, err := r.client.Write(bps)
	return err
}