github.com/mutagen-io/mutagen@v0.18.0-rc1/pkg/stream/line_processor.go

package stream

import (
	"bytes"
	"errors"
)

const (
	// defaultLineProcessorMaximumBufferSize is the default maximum buffer size
	// for LineProcessor.
	defaultLineProcessorMaximumBufferSize = 64 * 1024
)

// ErrMaximumBufferSizeExceeded is returned when a write would exceed the
// maximum internal buffer size for a writer.
var ErrMaximumBufferSizeExceeded = errors.New("maximum buffer size exceeded")

// trimCarriageReturn trims any single trailing carriage return from the end of
// a byte slice.
func trimCarriageReturn(buffer []byte) []byte {
	if len(buffer) > 0 && buffer[len(buffer)-1] == '\r' {
		return buffer[:len(buffer)-1]
	}
	return buffer
}

// LineProcessor is an io.Writer that splits its input stream into lines and
// writes those lines to a callback function. Line splits are performed on any
// instance of '\n' or '\r\n', with the split character(s) removed from the
// callback value.
type LineProcessor struct {
	// Callback is the line processing callback.
	Callback func(string)
	// MaximumBufferSize is the maximum allowed internal buffer size. If writes
	// to the writer exceed this size without incorporating a newline, then an
	// error will be raised. A value of 0 causes the writer to use a reasonable
	// default. A negative value indicates no limit.
	MaximumBufferSize int
	// buffer is any incomplete line fragment left over from a previous write.
	buffer []byte
}
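
// A minimal usage sketch: collect completed lines into a slice. The lines
// variable below is a hypothetical illustration of how Callback receives each
// line; a trailing fragment without a newline stays buffered for later writes.
//
//	var lines []string
//	processor := &LineProcessor{
//		Callback: func(line string) {
//			lines = append(lines, line)
//		},
//	}
//	processor.Write([]byte("first line\r\nsecond "))
//	processor.Write([]byte("line\nthird"))
//	// lines is now ["first line", "second line"]; "third" remains buffered
//	// until a subsequent write supplies a newline.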

// Write implements io.Writer.Write.
func (p *LineProcessor) Write(data []byte) (int, error) {
	// Ensure that storing the data won't exceed buffer size limits.
	// TODO: We could truncate data here if any capacity remains. A partial
	// fragment could (in theory) contain a newline that would allow the buffer
	// to be cleared out, though it's hard to imagine such an optimization is
	// critical given the relatively large default maximum buffer size and the
	// typical line size of most newline-delimited data.
	if p.MaximumBufferSize == 0 && len(p.buffer)+len(data) > defaultLineProcessorMaximumBufferSize {
		return 0, ErrMaximumBufferSizeExceeded
	} else if p.MaximumBufferSize > 0 && len(p.buffer)+len(data) > p.MaximumBufferSize {
		return 0, ErrMaximumBufferSizeExceeded
	}

	// Append the data to our internal buffer.
	p.buffer = append(p.buffer, data...)

	// Process all lines in the buffer and track the number of processed bytes.
	var processed int
	remaining := p.buffer
	for {
		// Find the index of the next newline character.
		index := bytes.IndexByte(remaining, '\n')
		if index == -1 {
			break
		}

		// Process the line.
		p.Callback(string(trimCarriageReturn(remaining[:index])))

		// Update the number of bytes that we've processed.
		processed += index + 1

		// Update the remaining slice.
		remaining = remaining[index+1:]
	}

	// If we managed to process bytes, then truncate our internal buffer.
	if processed > 0 {
		// Compute the number of leftover bytes.
		leftover := len(p.buffer) - processed

		// If there are leftover bytes, then shift them to the front of the
		// buffer.
		if leftover > 0 {
			copy(p.buffer[:leftover], p.buffer[processed:])
		}

		// Truncate the buffer.
		p.buffer = p.buffer[:leftover]
	}

	// Done.
	return len(data), nil
}
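
// Wiring sketch, assuming use with a subprocess: because LineProcessor
// implements io.Writer, it can be attached directly to any line-oriented
// output stream. The exec.Command invocation and log call below are
// illustrative assumptions, not part of this package.
//
//	cmd := exec.Command("some-tool")
//	cmd.Stderr = &LineProcessor{
//		Callback: func(line string) {
//			log.Println("some-tool:", line)
//		},
//	}
//	if err := cmd.Run(); err != nil {
//		// Handle the error.
//	}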