github.com/demonoid81/moby@v0.0.0-20200517203328-62dd8e17c460/daemon/logger/jsonfilelog/read.go

package jsonfilelog // import "github.com/demonoid81/moby/daemon/logger/jsonfilelog"

import (
	"context"
	"encoding/json"
	"io"

	"github.com/demonoid81/moby/api/types/backend"
	"github.com/demonoid81/moby/daemon/logger"
	"github.com/demonoid81/moby/daemon/logger/jsonfilelog/jsonlog"
	"github.com/demonoid81/moby/daemon/logger/loggerutils"
	"github.com/demonoid81/moby/pkg/tailfile"
	"github.com/sirupsen/logrus"
)

// maxJSONDecodeRetry caps how many times Decode retries after a decode error
// before giving up and returning the error to the caller.
const maxJSONDecodeRetry = 20000

// ReadLogs implements the logger's LogReader interface for the logs
// created by this driver.
func (l *JSONFileLogger) ReadLogs(config logger.ReadConfig) *logger.LogWatcher {
	logWatcher := logger.NewLogWatcher()

	go l.readLogs(logWatcher, config)
	return logWatcher
}

// readLogs registers the watcher with the logger, delegates the actual read to
// the shared log file writer, and unregisters the watcher (closing its message
// channel) once the read finishes.
func (l *JSONFileLogger) readLogs(watcher *logger.LogWatcher, config logger.ReadConfig) {
	defer close(watcher.Msg)

	l.mu.Lock()
	l.readers[watcher] = struct{}{}
	l.mu.Unlock()

	l.writer.ReadLogs(config, watcher)

	l.mu.Lock()
	delete(l.readers, watcher)
	l.mu.Unlock()
}
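// Illustrative sketch (not part of the original file): a consumer typically
// drains the LogWatcher returned by ReadLogs as shown below. The ReadConfig
// values are arbitrary assumptions; with Follow set to false the Msg channel
// is closed once the requested tail has been delivered, so the range ends.
//
//	lw := l.ReadLogs(logger.ReadConfig{Tail: 100, Follow: false})
//	for msg := range lw.Msg {
//		// msg.Line is the raw log line, msg.Source is "stdout" or "stderr",
//		// msg.Timestamp is the time recorded when the line was written.
//		handle(msg) // handle is a hypothetical consumer function
//	}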

// decodeLogLine decodes a single JSON-file log entry from dec into a
// logger.Message, reusing l as scratch space between calls.
func decodeLogLine(dec *json.Decoder, l *jsonlog.JSONLog) (*logger.Message, error) {
	l.Reset()
	if err := dec.Decode(l); err != nil {
		return nil, err
	}

	var attrs []backend.LogAttr
	if len(l.Attrs) != 0 {
		attrs = make([]backend.LogAttr, 0, len(l.Attrs))
		for k, v := range l.Attrs {
			attrs = append(attrs, backend.LogAttr{Key: k, Value: v})
		}
	}
	msg := &logger.Message{
		Source:    l.Stream,
		Timestamp: l.Created,
		Line:      []byte(l.Log),
		Attrs:     attrs,
	}
	return msg, nil
}
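// For reference, each entry on disk is one JSON object per line using the keys
// "log", "stream", "time" and, optionally, "attrs". A minimal sketch of feeding
// such a line through decodeLogLine (the sample values are made up):
//
//	dec := json.NewDecoder(strings.NewReader(
//		`{"log":"hello\n","stream":"stdout","time":"2020-05-17T20:33:28Z"}`))
//	msg, err := decodeLogLine(dec, &jsonlog.JSONLog{})
//	// err == nil, msg.Source == "stdout", string(msg.Line) == "hello\n"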

// decoder wraps a json.Decoder and a scratch JSONLog so the underlying reader
// can be swapped (for example when the read moves to a different log file)
// without reallocating state.
type decoder struct {
	rdr io.Reader
	dec *json.Decoder
	jl  *jsonlog.JSONLog
}

// Reset points the decoder at a new reader and clears any per-entry state.
func (d *decoder) Reset(rdr io.Reader) {
	d.rdr = rdr
	d.dec = nil
	if d.jl != nil {
		d.jl.Reset()
	}
}

// Close releases the decoder's references so they can be garbage collected.
func (d *decoder) Close() {
	d.dec = nil
	d.rdr = nil
	d.jl = nil
}

// Decode reads the next log entry from the underlying reader, retrying a
// bounded number of times when the entry cannot be decoded cleanly.
func (d *decoder) Decode() (msg *logger.Message, err error) {
	if d.dec == nil {
		d.dec = json.NewDecoder(d.rdr)
	}
	if d.jl == nil {
		d.jl = &jsonlog.JSONLog{}
	}
	for retries := 0; retries < maxJSONDecodeRetry; retries++ {
		msg, err = decodeLogLine(d.dec, d.jl)
		if err == nil || err == io.EOF {
			break
		}

		logrus.WithError(err).WithField("retries", retries).Warn("got error while decoding json")
		// Try again; the error could be due to an incomplete JSON object as we read.
		if _, ok := err.(*json.SyntaxError); ok {
			d.dec = json.NewDecoder(d.rdr)
			continue
		}

		// io.ErrUnexpectedEOF is returned from json.Decoder when there is
		// remaining data in the parser's buffer while an io.EOF occurs.
		// This race happens when the json logger writes a partial entry to
		// disk at the same time as the decoder tries to decode it.
		if err == io.ErrUnexpectedEOF {
			d.rdr = io.MultiReader(d.dec.Buffered(), d.rdr)
			d.dec = json.NewDecoder(d.rdr)
			continue
		}
	}
	return msg, err
}
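// Illustration (not from the original source) of the recovery path above: if a
// follower reads the file while the writer has only flushed a prefix such as
//
//	{"log":"partial li
//
// the decoder returns io.ErrUnexpectedEOF. Stitching d.dec.Buffered() back in
// front of d.rdr preserves the bytes already consumed, so once the writer
// completes the line a fresh json.Decoder can parse the whole object without
// dropping data.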

// decodeFunc is used to create a decoder for the log file reader.
func decodeFunc(rdr io.Reader) loggerutils.Decoder {
	return &decoder{
		rdr: rdr,
		dec: nil,
		jl:  nil,
	}
}

// getTailReader returns a reader positioned at (up to) the last req entries of
// r, along with the number of entries it actually found.
func getTailReader(ctx context.Context, r loggerutils.SizeReaderAt, req int) (io.Reader, int, error) {
	return tailfile.NewTailReader(ctx, r, req)
}
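// Hedged sketch of how these hooks fit together (the actual wiring lives in
// this package's constructor, not in this file, so the flow shown here is an
// assumption): the shared loggerutils reader uses getTailReader to seek to the
// last N entries of a file and feeds the resulting reader to a decoder built
// by decodeFunc.
//
//	// sizeReaderAt is a hypothetical loggerutils.SizeReaderAt over the log file.
//	rdr, found, err := getTailReader(context.Background(), sizeReaderAt, 50)
//	if err == nil {
//		dec := decodeFunc(rdr)
//		defer dec.Close()
//		for i := 0; i < found; i++ {
//			msg, _ := dec.Decode()
//			_ = msg // deliver to the watcher's Msg channel
//		}
//	}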