github.com/supabase/cli@v1.168.1/internal/start/templates/vector.yaml

api:
  enabled: true
  address: "0.0.0.0:9001"

sources:
  docker_syslog:
    type: "syslog"
    address: "0.0.0.0:9000"
    mode: "tcp"
    path: "/tmp/socket"

transforms:
  project_logs:
    type: remap
    inputs:
      - docker_syslog
    source: |-
      .project = "default"
      .event_message = del(.message)
      del(.procid)
      del(.source_id)
      del(.source_type)
      del(.facility)
      del(.host)
      del(.id)
  router:
    type: route
    inputs:
      - project_logs
    route:
      kong: '.appname == "{{ .KongId }}"'
      auth: '.appname == "{{ .GotrueId }}"'
      rest: '.appname == "{{ .RestId }}"'
      realtime: '.appname == "{{ .RealtimeId }}"'
      storage: '.appname == "{{ .StorageId }}"'
      functions: '.appname == "{{ .EdgeRuntimeId }}"'
      db: '.appname == "{{ .DbId }}"'
  # Kong logs only include API requests
  kong_logs:
    type: remap
    inputs:
      - router.kong
    source: |-
      req, err = parse_nginx_log(.event_message, "combined")
      if err == null {
        .timestamp = req.timestamp
        .metadata.request.headers.referer = req.referer
        .metadata.request.headers.user_agent = req.agent
        .metadata.request.headers.cf_connecting_ip = req.client
        .metadata.request.method = req.method
        .metadata.request.path = req.path
        .metadata.request.protocol = req.protocol
        .metadata.response.status_code = req.status
      }
  # TODO: create a separate page and filter for kong error logs
  kong_err:
    type: remap
    inputs:
      - router.kong
    source: |-
      .metadata.request.method = "GET"
      .metadata.response.status_code = 200
      parsed, err = parse_nginx_log(.event_message, "error")
      if err == null {
        .timestamp = parsed.timestamp
        .severity = parsed.severity
        .metadata.request.host = parsed.host
        .metadata.request.headers.cf_connecting_ip = parsed.client
        url, err = split(parsed.request, " ")
        if err == null {
          .metadata.request.method = url[0]
          .metadata.request.path = url[1]
          .metadata.request.protocol = url[2]
        }
      }
  # Gotrue logs are structured JSON strings which the frontend parses directly, but we keep metadata for consistency.
  auth_logs:
    type: remap
    inputs:
      - router.auth
    source: |-
      parsed, err = parse_json(.event_message)
      if err == null {
        .metadata.timestamp = parsed.time
        .metadata = merge!(.metadata, parsed)
      }
  # PostgREST logs are structured, so we separate the timestamp from the message using a regex
  rest_logs:
    type: remap
    inputs:
      - router.rest
    source: |-
      parsed, err = parse_regex(.event_message, r'^(?P<time>.*): (?P<msg>.*)$')
      if err == null {
        .event_message = parsed.msg
        .timestamp = to_timestamp!(parsed.time)
        .metadata.host = .project
      }
  # Realtime logs are structured, so we parse the severity level using a regex (the time is ignored because it has no date)
  realtime_logs:
    type: remap
    inputs:
      - router.realtime
    source: |-
      .metadata.project = del(.project)
      .metadata.external_id = .metadata.project
      parsed, err = parse_regex(.event_message, r'^(?P<time>\d+:\d+:\d+\.\d+) \[(?P<level>\w+)\] (?P<msg>.*)$')
      if err == null {
        .event_message = parsed.msg
        .metadata.level = parsed.level
      }
  # Storage logs may contain JSON objects, so we parse them for completeness
  storage_logs:
    type: remap
    inputs:
      - router.storage
    source: |-
      .metadata.project = del(.project)
      .metadata.tenantId = .metadata.project
      parsed, err = parse_json(.event_message)
      if err == null {
        .event_message = parsed.msg
        .metadata.level = parsed.level
        .metadata.timestamp = parsed.time
        .metadata.context[0].host = parsed.hostname
        .metadata.context[0].pid = parsed.pid
      }
  # Postgres logs some messages to stderr, which we map to the warning severity level
  # TODO: parse raw postgres logs via regex
  db_logs:
    type: remap
    inputs:
      - router.db
    source: |-
      .metadata.host = "db-default"
      .metadata.parsed.timestamp = .timestamp
      .metadata.parsed.error_severity = replace!(.severity, r'^err$', "warning")
      if .metadata.parsed.error_severity == "info" {
        .metadata.parsed.error_severity = "log"
      }
      .metadata.parsed.error_severity = upcase(.metadata.parsed.error_severity)

sinks:
  logflare_auth:
    type: "http"
    inputs:
      - auth_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=gotrue.logs.prod&api_key={{ .ApiKey }}"
  logflare_realtime:
    type: "http"
    inputs:
      - realtime_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=realtime.logs.prod&api_key={{ .ApiKey }}"
  logflare_rest:
    type: "http"
    inputs:
      - rest_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=postgREST.logs.prod&api_key={{ .ApiKey }}"
  logflare_db:
    type: "http"
    inputs:
      - db_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    # We must route this sink through kong because ingesting logs before logflare is fully initialized will
    # lead to broken queries from studio. This relies on the assumption that containers are started in the
    # following order: vector > db > logflare > kong
    uri: "http://{{ .KongId }}:8000/analytics/v1/api/logs?source_name=postgres.logs&api_key={{ .ApiKey }}"
  logflare_storage:
    type: "http"
    inputs:
      - storage_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=storage.logs.prod.2&api_key={{ .ApiKey }}"
  logflare_functions:
    type: "http"
    inputs:
      - router.functions
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=deno-relay-logs&api_key={{ .ApiKey }}"
  logflare_kong:
    type: "http"
    inputs:
      - kong_logs
    encoding:
      codec: "json"
    method: "post"
    request:
      retry_max_duration_secs: 10
    uri: "http://{{ .LogflareId }}:4000/api/logs?source_name=cloudflare.logs.prod&api_key={{ .ApiKey }}"