github.com/influxdata/influxdb/v2@v2.7.6/telegraf/plugins/inputs/logparser.go (about)

     1  package inputs
     2  
     3  import (
     4  	"errors"
     5  	"fmt"
     6  	"strconv"
     7  	"strings"
     8  )
     9  
    10  // LogParserPlugin is based on telegraf LogParserPlugin.
    11  type LogParserPlugin struct {
    12  	// baseInput supplies shared input-plugin behavior; declared elsewhere in this package.
    13  	baseInput
    14  	// Files holds the log-file glob patterns rendered into the generated
    15  	// TOML "files" array (see TOML) and decoded from the "files" key in
    16  	// UnmarshalTOML.
    17  	Files []string `json:"files"`
    18  }
    15  
    16  // PluginName is based on telegraf plugin name.
    17  func (l *LogParserPlugin) PluginName() string {
    18  	return "logparser"
    19  }
    20  
    21  // TOML encodes to toml string
    22  func (l *LogParserPlugin) TOML() string {
    23  	// Quote each configured glob so it is a valid TOML string element.
    24  	quoted := make([]string, 0, len(l.Files))
    25  	for _, file := range l.Files {
    26  		quoted = append(quoted, strconv.Quote(file))
    27  	}
    28  	return fmt.Sprintf(`[[inputs.%s]]
  ## Log files to parse.
  ## These accept standard unix glob matching rules, but with the addition of
  ## ** as a "super asterisk". ie:
  ##   /var/log/**.log     -> recursively find all .log files in /var/log
  ##   /var/log/*/*.log    -> find all .log files with a parent dir in /var/log
  ##   /var/log/apache.log -> only tail the apache log file
  files = [%s]

  ## Read files that currently exist from the beginning. Files that are created
  ## while telegraf is running (and that match the "files" globs) will always
  ## be read from the beginning.
  from_beginning = false

  ## Method used to watch for file updates.  Can be either "inotify" or "poll".
  # watch_method = "inotify"

  ## Parse logstash-style "grok" patterns:
  [inputs.logparser.grok]
    ## This is a list of patterns to check the given log file(s) for.
    ## Note that adding patterns here increases processing time. The most
    ## efficient configuration is to have one pattern per logparser.
    ## Other common built-in patterns are:
    ##   %%{COMMON_LOG_FORMAT}   (plain apache & nginx access logs)
    ##   %%{COMBINED_LOG_FORMAT} (access logs + referrer & agent)
    patterns = ["%%{COMBINED_LOG_FORMAT}"]

    ## Name of the outputted measurement name.
    measurement = "apache_access_log"

    ## Full path(s) to custom pattern files.
    custom_pattern_files = []

    ## Custom patterns can also be defined here. Put one pattern per line.
    custom_patterns = '''
    '''

    ## Timezone allows you to provide an override for timestamps that
    ## don't already include an offset
    ## e.g. 04/06/2016 12:41:45 data one two 5.43µs
    ##
    ## Default: "" which renders UTC
    ## Options are as follows:
    ##   1. Local             -- interpret based on machine localtime
    ##   2. "Canada/Eastern"  -- Unix TZ values like those found in https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
    ##   3. UTC               -- or blank/unspecified, will return timestamp in UTC
    # timezone = "Canada/Eastern"

      ## When set to "disable", timestamp will not incremented if there is a
      ## duplicate.
    # unique_timestamp = "auto"
`, l.PluginName(), strings.Join(quoted, ", "))
    29  }
    80  
    81  // UnmarshalTOML decodes the parsed data to the object.
    82  //
    83  // data must be a map[string]interface{} whose "files" key holds an array
    84  // of strings; each entry is appended to l.Files. A descriptive error is
    85  // returned when the shape does not match.
    86  func (l *LogParserPlugin) UnmarshalTOML(data interface{}) error {
    87  	dataOK, ok := data.(map[string]interface{})
    88  	if !ok {
    89  		return errors.New("bad files for logparser input plugin")
    90  	}
    91  	files, ok := dataOK["files"].([]interface{})
    92  	if !ok {
    93  		return errors.New("files is not an array for logparser input plugin")
    94  	}
    95  	for _, fi := range files {
    96  		// Guard the assertion: the previous unchecked fi.(string) would
    97  		// panic on a non-string entry instead of reporting a decode error.
    98  		fl, ok := fi.(string)
    99  		if !ok {
   100  			return errors.New("file is not a string for logparser input plugin")
   101  		}
   102  		l.Files = append(l.Files, fl)
   103  	}
   104  	return nil
   105  }
    95  }