github.com/influxdata/influxdb/v2@v2.7.6/influxql/v1tests/query_test.go

package v1tests

import (
	"context"
	"fmt"
	"math/rand"
	"net/url"
	"strings"
	"testing"
	"time"

	"github.com/influxdata/influxdb/v2"
	icontext "github.com/influxdata/influxdb/v2/context"
	"github.com/influxdata/influxdb/v2/tests"
	"github.com/stretchr/testify/require"
)

// Ensure parameterized queries can be executed
func TestServer_Query_Parameterized(t *testing.T) {
	t.Parallel()
	s := OpenServer(t)
	defer s.Close()

	writes := []string{
		fmt.Sprintf(`cpu,host=foo value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T01:00:00Z").UnixNano()),
		fmt.Sprintf(`cpu,host=bar value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-02T01:00:00Z").UnixNano()),
	}

	test := NewTest("db0", "rp0")
	test.writes = Writes{
		&Write{data: strings.Join(writes, "\n")},
	}

	minTime := mustParseTime(time.RFC3339Nano, "2000-01-01T01:00:00Z").UnixNano()
	maxTime := mustParseTime(time.RFC3339Nano, "2000-01-01T02:00:00Z").UnixNano()

	test.addQueries([]*Query{
		{
			name:    "parameterized time",
			params:  url.Values{"db": []string{"db0"}, "params": []string{fmt.Sprintf(`{"0": %d, "1": %d}`, minTime, maxTime)}},
			command: `SELECT value FROM cpu WHERE time >= $0 AND time < $1`,
			exp:     `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-01T01:00:00Z",1]]}]}]}`,
		},
		{
			name:    "parameterized tag",
			params:  url.Values{"db": []string{"db0"}, "params": []string{`{"0": "foo"}`}},
			command: `SELECT value FROM cpu WHERE host = $0`,
			exp:     `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-01T01:00:00Z",1]]}]}]}`,
		},
	}...)

	ctx := context.Background()
	test.Run(ctx, t, s)
}

// Ensure queries are properly chunked
func TestServer_Query_Chunked(t *testing.T) {
	t.Parallel()
	s := OpenServer(t)
	defer s.Close()

	writes := []string{
		fmt.Sprintf(`cpu,host=foo value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T01:00:00Z").UnixNano()),
		fmt.Sprintf(`cpu,host=bar value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-02T01:00:00Z").UnixNano()),
	}

	test := NewTest("db0", "rp0")
	test.writes = Writes{
		&Write{data: strings.Join(writes, "\n")},
	}

	test.addQueries([]*Query{
		{
			name:    "query is chunked",
			params:  url.Values{"db": []string{"db0"}, "chunked": []string{"true"}, "chunk_size": []string{"1"}},
			command: `SELECT value FROM cpu`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-01T01:00:00Z",1]],"partial":true}],"partial":true}]}
{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-02T01:00:00Z",1]]}]}]}`,
		},
		{
			name:    "query is not chunked",
			params:  url.Values{"db": []string{"db0"}, "chunked": []string{"false"}, "chunk_size": []string{"1"}},
			command: `SELECT value FROM cpu`,
			exp:     `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-01T01:00:00Z",1],["2000-01-02T01:00:00Z",1]]}]}]}`,
		},
	}...)
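
	// The expected output above shows that with chunked=true and chunk_size=1
	// the server returns one newline-delimited response object per point,
	// marking the non-final chunk with "partial":true, while chunked=false
	// merges all values into a single response object.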

	ctx := context.Background()
	test.Run(ctx, t, s)
}

// Ensure a more complex group-by is correct
func TestServer_Query_ComplexGroupby(t *testing.T) {
	t.Parallel()
	s := OpenServer(t)
	defer s.Close()

	r := rand.New(rand.NewSource(1000))
	abc := []string{"a", "b", "c"}
	startDate := time.Date(2021, 5, 10, 0, 0, 0, 0, time.UTC)
	endDate := time.Date(2021, 5, 15, 0, 0, 0, 0, time.UTC)
	writes := make([]string, 0)
	for date := startDate; date.Before(endDate); date = date.Add(1 * time.Hour) {
		line := fmt.Sprintf(`m0,tenant_id=t%s,env=e%s total_count=%d %d`,
			abc[r.Intn(3)], abc[r.Intn(3)], 10+r.Intn(5), date.UnixNano())
		writes = append(writes, line)
	}

	test := NewTest("db0", "rp0")
	test.writes = Writes{
		&Write{data: strings.Join(writes, "\n")},
	}

	minTime := startDate.UnixNano()
	maxTime := endDate.UnixNano()

	test.addQueries([]*Query{
		{
			name:    "parameterized time",
			params:  url.Values{"db": []string{"db0"}, "params": []string{fmt.Sprintf(`{"0": %d, "1": %d}`, minTime, maxTime)}},
			command: `SELECT SUM(ncount) as scount FROM (SELECT NON_NEGATIVE_DIFFERENCE(total_count) as ncount FROM m0 WHERE time >= $0 AND time <= $1 AND tenant_id='tb' GROUP BY env) WHERE time >= $0 AND time <= $1 GROUP BY time(1d)`,
			exp:     `{"results":[{"statement_id":0,"series":[{"name":"m0","columns":["time","scount"],"values":[["2021-05-10T00:00:00Z",10],["2021-05-11T00:00:00Z",5],["2021-05-12T00:00:00Z",3],["2021-05-13T00:00:00Z",7],["2021-05-14T00:00:00Z",4],["2021-05-15T00:00:00Z",null]]}]}]}`,
		},
	}...)

	ctx := context.Background()
	test.Run(ctx, t, s)
}

func TestServer_Query_ShowDatabases(t *testing.T) {
	t.Parallel()
	s := OpenServer(t)
	defer s.MustClose()

	ctx := context.Background()
	ctx = icontext.SetAuthorizer(ctx, tests.MakeAuthorization(s.DefaultOrgID, s.DefaultUserID, influxdb.OperPermissions()))

	// create some buckets and mappings
	buckets := []struct {
		name string
		db   string
		rp   string
	}{
		{"my-bucket", "my-bucket", "autogen"},
		{"telegraf/autogen", "telegraf", "autogen"},
		{"telegraf/1_week", "telegraf", "1_week"},
		{"telegraf/1_month", "telegraf", "1_month"},
	}

	for _, bi := range buckets {
		b := influxdb.Bucket{
			OrgID:           s.DefaultOrgID,
			Type:            influxdb.BucketTypeUser,
			Name:            bi.name,
			RetentionPeriod: 0,
		}
		err := s.Launcher.
			Launcher.
			BucketService().
			CreateBucket(ctx, &b)
		require.NoError(t, err)

		err = s.Launcher.
			DBRPMappingService().
			Create(ctx, &influxdb.DBRPMapping{
				Database:        bi.db,
				RetentionPolicy: bi.rp,
				Default:         true,
				OrganizationID:  s.DefaultOrgID,
				BucketID:        b.ID,
			})
		require.NoError(t, err)
	}

	test := NewEmptyTest()
	test.addQueries(
		&Query{
			name:    "show databases does not return duplicates",
			command: "SHOW DATABASES",
			exp:     `{"results":[{"statement_id":0,"series":[{"name":"databases","columns":["name"],"values":[["my-bucket"],["telegraf"],["_monitoring"],["_tasks"],["db"]]}]}]}`,
		},
	)

	test.Run(context.Background(), t, s)
}

func TestServer_Query_Subquery(t *testing.T) {
	writes := []string{
		fmt.Sprintf(`request,region=west,status=200 duration_ms=100 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:00Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=200 duration_ms=100 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:10Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=200 duration_ms=100 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:20Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=204 duration_ms=100 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:30Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=204 duration_ms=100 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:40Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=500 duration_ms=200 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:00Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=500 duration_ms=200 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:10Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=500 duration_ms=200 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:20Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=504 duration_ms=200 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:30Z").UnixNano()),
		fmt.Sprintf(`request,region=west,status=504 duration_ms=200 %d`, mustParseTime(time.RFC3339Nano, "2004-04-09T01:00:40Z").UnixNano()),
	}

	ctx := context.Background()
	s := NewTestServer(ctx, t, "db0", "rp0", writes...)

	cases := []Query{
		{
			// This test verifies that data cached from the storage layer
			// is complete in order to satisfy the two subqueries.
			name:   "different tag predicates for same field",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT SUM(success) as sum_success, SUM(requests) as sum_fail
FROM (
  SELECT duration_ms as success
  FROM request
  WHERE status !~ /^5.*$/ AND region = 'west'
), (
  SELECT duration_ms as requests
  FROM request
  WHERE status =~ /^5.*$/ AND region = 'west'
)
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","sum_success","sum_fail"],"values":[["1970-01-01T00:00:00Z",500,1000]]}]}]}`,
		},
		{
			name:   "different time predicates for same field",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT COUNT(r1) as r1, COUNT(r2) as r2
FROM (
  SELECT duration_ms as r1
  FROM request
  WHERE time >= '2004-04-09T01:00:00Z' AND time <= '2004-04-09T01:00:20Z'
), (
  SELECT duration_ms as r2
  FROM request
  WHERE time >= '2004-04-09T01:00:10Z' AND time <= '2004-04-09T01:00:40Z'
)
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["1970-01-01T00:00:00Z",6,8]]}]}]}`,
		},
		{
			name:   "outer query with narrower time range than subqueries",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT COUNT(r1) as r1, COUNT(r2) as r2
FROM (
  SELECT duration_ms as r1
  FROM request
  WHERE time >= '2004-04-09T01:00:00Z' AND time <= '2004-04-09T01:00:20Z'
), (
  SELECT duration_ms as r2
  FROM request
  WHERE time >= '2004-04-09T01:00:10Z' AND time <= '2004-04-09T01:00:40Z'
)
WHERE time >= '2004-04-09T01:00:20Z' AND time <= '2004-04-09T01:00:30Z'
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["2004-04-09T01:00:20Z",2,4]]}]}]}`,
		},
		{
			name:   "outer query with narrower time range than subqueries using aggregates",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT r1 as r1, r2 as r2
FROM (
  SELECT COUNT(duration_ms) as r1
  FROM request
  WHERE time >= '2004-04-09T01:00:00Z' AND time <= '2004-04-09T01:00:20Z'
), (
  SELECT COUNT(duration_ms) as r2
  FROM request
  WHERE time >= '2004-04-09T01:00:10Z' AND time <= '2004-04-09T01:00:40Z'
)
WHERE time >= '2004-04-09T01:00:20Z' AND time <= '2004-04-09T01:00:30Z'
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["2004-04-09T01:00:20Z",2,null],["2004-04-09T01:00:20Z",null,4]]}]}]}`,
		},
		{
			name:   "outer query with no time range and subqueries using aggregates",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT r1 as r1, r2 as r2
FROM (
  SELECT COUNT(duration_ms) as r1
  FROM request
  WHERE time >= '2004-04-09T01:00:00Z' AND time <= '2004-04-09T01:00:20Z'
), (
  SELECT COUNT(duration_ms) as r2
  FROM request
  WHERE time >= '2004-04-09T01:00:10Z' AND time <= '2004-04-09T01:00:40Z'
)
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["2004-04-09T01:00:00Z",6,null],["2004-04-09T01:00:10Z",null,8]]}]}]}`,
		},
		{
			name:   "outer query with narrower time range than subqueries no aggregate",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT r1 as r1, r2 as r2
FROM (
  SELECT duration_ms as r1
  FROM request
  WHERE time >= '2004-04-09T01:00:00Z' AND time <= '2004-04-09T01:00:20Z'
), (
  SELECT duration_ms as r2
  FROM request
  WHERE time >= '2004-04-09T01:00:10Z' AND time <= '2004-04-09T01:00:40Z'
)
WHERE time >= '2004-04-09T01:00:20Z' AND time <= '2004-04-09T01:00:30Z'
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["2004-04-09T01:00:20Z",100,null],["2004-04-09T01:00:20Z",null,100],["2004-04-09T01:00:20Z",200,null],["2004-04-09T01:00:20Z",null,200],["2004-04-09T01:00:30Z",null,200],["2004-04-09T01:00:30Z",null,100]]}]}]}`,
		},
		{
			name:   "outer query with time range",
			params: url.Values{"db": []string{"db0"}},
			command: `
SELECT COUNT(r1) as r1, COUNT(r2) as r2
FROM (
  SELECT duration_ms as r1
  FROM request
), (
  SELECT duration_ms as r2
  FROM request
)
WHERE time >= '2004-04-09T01:00:20Z' AND time <= '2004-04-09T01:00:30Z'
`,
			exp: `{"results":[{"statement_id":0,"series":[{"name":"request","columns":["time","r1","r2"],"values":[["2004-04-09T01:00:20Z",4,4]]}]}]}`,
		},
	}

	for _, q := range cases {
		t.Run(q.name, func(t *testing.T) {
			s.Execute(ctx, t, q)
		})
	}
}
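
// The sketch below was added by the editor and is not part of the original
// suite. It shows how the same harness helpers used above (OpenServer, NewTest,
// addQueries, Run) could cover binding a tag parameter and time parameters in
// one query, which TestServer_Query_Parameterized exercises separately. The
// function name, the combined params payload, and the expected JSON are
// assumptions: they presume a single params object may mix string and integer
// values and that the response shape matches the cases above.
func TestServer_Query_Parameterized_CombinedSketch(t *testing.T) {
	t.Parallel()
	s := OpenServer(t)
	defer s.Close()

	// Same two points as TestServer_Query_Parameterized: one host inside the
	// queried window, one outside it.
	writes := []string{
		fmt.Sprintf(`cpu,host=foo value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T01:00:00Z").UnixNano()),
		fmt.Sprintf(`cpu,host=bar value=1.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-02T01:00:00Z").UnixNano()),
	}

	test := NewTest("db0", "rp0")
	test.writes = Writes{
		&Write{data: strings.Join(writes, "\n")},
	}

	minTime := mustParseTime(time.RFC3339Nano, "2000-01-01T01:00:00Z").UnixNano()
	maxTime := mustParseTime(time.RFC3339Nano, "2000-01-01T02:00:00Z").UnixNano()

	test.addQueries(&Query{
		name: "parameterized tag and time together (sketch)",
		params: url.Values{
			"db":     []string{"db0"},
			"params": []string{fmt.Sprintf(`{"0": "foo", "1": %d, "2": %d}`, minTime, maxTime)},
		},
		command: `SELECT value FROM cpu WHERE host = $0 AND time >= $1 AND time < $2`,
		// Assumed to match the "parameterized tag" case above: only the
		// host=foo point falls inside [minTime, maxTime).
		exp: `{"results":[{"statement_id":0,"series":[{"name":"cpu","columns":["time","value"],"values":[["2000-01-01T01:00:00Z",1]]}]}]}`,
	})

	test.Run(context.Background(), t, s)
}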