github.com/matrixorigin/matrixone@v0.7.0/pkg/util/metric/metric_collector_test.go

// Copyright 2022 Matrix Origin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package metric

import (
	"bytes"
	"context"
	"regexp"
	"testing"
	"time"

	pb "github.com/matrixorigin/matrixone/pkg/pb/metric"
	"github.com/matrixorigin/matrixone/pkg/util/export/etl"
	"github.com/matrixorigin/matrixone/pkg/util/export/table"
	ie "github.com/matrixorigin/matrixone/pkg/util/internalExecutor"
)

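// init registers two dummy subsystems, "m1" and "m2", so the tests below can
// send metric families under known subsystem names.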
func init() {
	m1 := &SubSystem{"m1", "", false}
	m2 := &SubSystem{"m2", "", false}
	allSubSystem[m1.Name] = m1
	allSubSystem[m2.Name] = m2
}

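// dummySqlExecutor is a stub ie.InternalExecutor that forwards every executed
// SQL statement to a channel, letting tests inspect the generated statements.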
type dummySqlExecutor struct {
	opts ie.SessionOverrideOptions
	ch   chan<- string
}

func (e *dummySqlExecutor) ApplySessionOverride(opts ie.SessionOverrideOptions) {
	e.opts = opts
}

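// Exec publishes the SQL text on the channel without blocking (the statement
// is dropped if the channel is full) and always reports success.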
func (e *dummySqlExecutor) Exec(ctx context.Context, sql string, opts ie.SessionOverrideOptions) error {
	select {
	case e.ch <- sql:
	default:
	}
	return nil
}

func (e *dummySqlExecutor) Query(ctx context.Context, sql string, opts ie.SessionOverrideOptions) ie.InternalExecResult {
	return nil
}

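// newExecutorFactory returns an internal-executor factory whose executors
// report every SQL statement they receive on sqlch.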
func newExecutorFactory(sqlch chan string) func() ie.InternalExecutor {
	return func() ie.InternalExecutor {
		return &dummySqlExecutor{
			opts: ie.NewOptsBuilder().Finish(),
			ch:   sqlch,
		}
	}
}

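// TestCollectorOpts checks that the functional options are copied into the
// collector's opts struct. SqlWorkerNum is 0, so the nil executor factory is
// never touched.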
func TestCollectorOpts(t *testing.T) {
	c := newMetricCollector(
		nil, // this nil pointer won't be touched when SqlWorkerNum is set to 0
		WithFlushInterval(time.Second),
		WithMetricThreshold(3),
		WithSampleThreshold(10),
		WithSqlWorkerNum(0),
	).(*metricCollector)
	o := c.opts
	if o.flushInterval != time.Second || o.metricThreshold != 3 || o.sampleThreshold != 10 {
		t.Errorf("collectorOpts were not applied correctly")
	}
}

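// TestCollector feeds two batches of metric families into the SQL-backed
// collector (metric threshold 2, flush interval 200ms) and expects m1 to be
// flushed first with three counter rows, while m2's two raw-histogram samples
// are only flushed once the flush interval has elapsed.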
func TestCollector(t *testing.T) {
	sqlch := make(chan string, 100)
	factory := newExecutorFactory(sqlch)
	collector := newMetricCollector(factory, WithFlushInterval(200*time.Millisecond), WithMetricThreshold(2))
	collector.Start(context.TODO())
	defer collector.Stop(false)
	names := []string{"m1", "m2"}
	nodes := []string{"e669d136-24f3-11ed-ba8c-d6aee46d73fa", "e9b89520-24f3-11ed-ba8c-d6aee46d73fa"}
	roles := []string{"ping", "pong"}
	ts := time.Now().UnixMicro()
	go func() {
		_ = collector.SendMetrics(context.TODO(), []*pb.MetricFamily{
			{Name: names[0], Type: pb.MetricType_COUNTER, Node: nodes[0], Role: roles[0], Metric: []*pb.Metric{
				{
					Counter: &pb.Counter{Value: 12.0}, Collecttime: ts,
				},
			}},
			{Name: names[1], Type: pb.MetricType_RAWHIST, Metric: []*pb.Metric{
				{
					Label:   []*pb.LabelPair{{Name: "type", Value: "select"}, {Name: "account", Value: "user"}},
					RawHist: &pb.RawHist{Samples: []*pb.Sample{{Datetime: ts, Value: 12.0}, {Datetime: ts, Value: 12.0}}},
				},
			}},
		})

		_ = collector.SendMetrics(context.TODO(), []*pb.MetricFamily{
			{Name: names[0], Type: pb.MetricType_COUNTER, Node: nodes[1], Role: roles[1], Metric: []*pb.Metric{
				{
					Counter: &pb.Counter{Value: 21.0}, Collecttime: ts,
				},
				{
					Counter: &pb.Counter{Value: 66.0}, Collecttime: ts,
				},
			}},
		})
	}()
	instant := time.Now()
	valuesRe := regexp.MustCompile(`\([^)]*\),?\s?`) // find patterns like (1,2,3)
	nameRe := regexp.MustCompile(`\.(\w+)\svalues`)  // extract the table name
	nameAndValueCnt := func(s string) (name string, cnt int) {
		cnt = len(valuesRe.FindAllString(s, -1))
		matches := nameRe.FindStringSubmatch(s)
		if len(matches) > 1 {
			name = matches[1]
		} else {
			name = "<nil>"
		}
		return name, cnt
	}

	name, cnt := nameAndValueCnt(<-sqlch)
	if name != names[0] || cnt != 3 {
		t.Errorf("m1 metric should be flushed first with 3 rows, got %s with %d rows", name, cnt)
	}

	sql := <-sqlch
	if time.Since(instant) < 200*time.Millisecond {
		t.Errorf("m2 should only be flushed after the flush interval has elapsed")
	}
	name, cnt = nameAndValueCnt(sql)
	if name != names[1] || cnt != 2 {
		t.Errorf("m2 metric should be flushed with 2 rows, got %s with %d rows", name, cnt)
	}
}

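// dummyStringWriter pairs a CSV RowWriter with a channel: rendered CSV comes
// back through WriteString, which publishes the table name followed by the
// payload so the test can read both.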
type dummyStringWriter struct {
	name string
	ch   chan string
	// underlying CSV writer, wired up in newDummyFSWriterFactory
	writer table.RowWriter
}

func (w *dummyStringWriter) WriteString(s string) (n int, err error) {
	n = len(s)
	w.ch <- w.name
	w.ch <- s
	return n, nil
}

func (w *dummyStringWriter) WriteRow(row *table.Row) error {
	return w.writer.WriteRow(row)
}

func (w *dummyStringWriter) FlushAndClose() (int, error) {
	return w.writer.FlushAndClose()
}

func (w *dummyStringWriter) GetContent() string { return "" }

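// newDummyFSWriterFactory builds a table.WriterFactory whose writers serialize
// rows to CSV and deliver the output to csvCh through dummyStringWriter,
// instead of writing to a filesystem.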
func newDummyFSWriterFactory(csvCh chan string) table.WriterFactory {
	return table.WriterFactory(func(_ context.Context, account string, tbl *table.Table, ts time.Time) table.RowWriter {
		w := &dummyStringWriter{name: tbl.Table, ch: csvCh}
		w.writer = etl.NewCSVWriter(context.TODO(), bytes.NewBuffer(nil), w)
		return w
	})
}

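// dummyInitView sets up a metric view for each table using the type and
// account label columns, matching the labels on the metrics sent in
// TestFSCollector.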
func dummyInitView(ctx context.Context, tbls []string) {
	for _, tbl := range tbls {
		GetMetricViewWithLabels(ctx, tbl, []string{metricTypeColumn.Name, metricAccountColumn.Name})
	}
}

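// TestFSCollector drives the file-backed collector with ExportMultiTable
// disabled, so both m1 and m2 rows land in the single metric table; the test
// then counts the m1 and m2 lines in the CSV handed to the dummy writer.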
func TestFSCollector(t *testing.T) {
	ctx := context.Background()
	csvCh := make(chan string, 100)
	factory := newDummyFSWriterFactory(csvCh)
	collector := newMetricFSCollector(factory, WithFlushInterval(3*time.Second), WithMetricThreshold(4), ExportMultiTable(false))
	collector.Start(context.TODO())
	defer collector.Stop(false)
	names := []string{"m1", "m2"}
	nodes := []string{"e669d136-24f3-11ed-ba8c-d6aee46d73fa", "e9b89520-24f3-11ed-ba8c-d6aee46d73fa"}
	roles := []string{"ping", "pong"}
	ts := time.Now().UnixMicro()
	dummyInitView(ctx, names)
	go func() {
		_ = collector.SendMetrics(context.TODO(), []*pb.MetricFamily{
			{Name: names[0], Type: pb.MetricType_COUNTER, Node: nodes[0], Role: roles[0], Metric: []*pb.Metric{
				{
					Label:   []*pb.LabelPair{{Name: "account", Value: "user"}},
					Counter: &pb.Counter{Value: 12.0}, Collecttime: ts,
				},
			}},
			{Name: names[1], Type: pb.MetricType_RAWHIST, Metric: []*pb.Metric{
				{
					Label:   []*pb.LabelPair{{Name: "type", Value: "select"}, {Name: "account", Value: "user"}},
					RawHist: &pb.RawHist{Samples: []*pb.Sample{{Datetime: ts, Value: 12.0}, {Datetime: ts, Value: 12.0}}},
				},
			}},
		})

		_ = collector.SendMetrics(context.TODO(), []*pb.MetricFamily{
			{Name: names[0], Type: pb.MetricType_COUNTER, Node: nodes[1], Role: roles[1], Metric: []*pb.Metric{
				{
					Label:   []*pb.LabelPair{{Name: "account", Value: "user"}},
					Counter: &pb.Counter{Value: 21.0}, Collecttime: ts,
				},
				{
					Label:   []*pb.LabelPair{{Name: "account", Value: "user"}},
					Counter: &pb.Counter{Value: 66.0}, Collecttime: ts,
				},
			}},
		})
	}()
	M1ValuesRe := regexp.MustCompile(`m1,(.*[,]?)+\n`) // find lines like m1,...,...,...\n
	M2ValuesRe := regexp.MustCompile(`m2,(.*[,]?)+\n`) // find lines like m2,...,...,...\n
	nameAndValueCnt := func(n, s string, re *regexp.Regexp) (name string, cnt int) {
		t.Logf("name: %s, csv: %s", n, s)
		cnt = len(re.FindAllString(s, -1))
		if cnt > 0 {
			name = n
		} else {
			name = "<nil>"
		}
		return name, cnt
	}

	n, s := <-csvCh, <-csvCh
	name, cnt := nameAndValueCnt(n, s, M1ValuesRe)
	if name != SingleMetricTable.GetName() || cnt != 3 {
		t.Errorf("m1 metric should be flushed with 3 rows, got %s with %d rows", name, cnt)
	}

	name, cnt = nameAndValueCnt(n, s, M2ValuesRe)
	if name != SingleMetricTable.GetName() || cnt != 2 {
		t.Errorf("m2 metric should be flushed with 2 rows, got %s with %d rows", name, cnt)
	}
}