// Copyright 2016 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.

package tree_test

import (
	"context"
	"fmt"
	"math"
	"strings"
	"testing"
	"time"

	"github.com/cockroachdb/cockroach/pkg/settings/cluster"
	"github.com/cockroachdb/cockroach/pkg/sql/parser"
	"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
	"github.com/cockroachdb/cockroach/pkg/sql/sessiondata"
	"github.com/cockroachdb/cockroach/pkg/sql/types"
	"github.com/cockroachdb/cockroach/pkg/util/leaktest"
	"github.com/cockroachdb/cockroach/pkg/util/timeofday"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func prepareExpr(t *testing.T, datumExpr string) tree.TypedExpr {
	expr, err := parser.ParseExpr(datumExpr)
	if err != nil {
		t.Fatalf("%s: %v", datumExpr, err)
	}
	// Type checking ensures constant folding is performed and type
	// annotations have come into effect.
	ctx := context.Background()
	sema := tree.MakeSemaContext()
	typedExpr, err := tree.TypeCheck(ctx, expr, &sema, types.Any)
	if err != nil {
		t.Fatalf("%s: %v", datumExpr, err)
	}
	// Normalization ensures that casts are processed.
	evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
	defer evalCtx.Stop(context.Background())
	typedExpr, err = evalCtx.NormalizeExpr(typedExpr)
	if err != nil {
		t.Fatalf("%s: %v", datumExpr, err)
	}
	return typedExpr
}

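// TestDatumOrdering verifies the Prev, Next, IsMin, IsMax, Min, and Max
// methods across a representative set of datum types.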
func TestDatumOrdering(t *testing.T) {
	defer leaktest.AfterTest(t)()
	const valIsMin = `min`
	const valIsMax = `max`
	const noPrev = ``
	const noNext = ``
	const noMin = ``
	const noMax = ``

	testData := []struct {
		datumExpr string
		prev      string
		next      string
		min       string
		max       string
	}{
		// Integers
		{`1`, `0`, `2`, `-9223372036854775808`, `9223372036854775807`},
		{`-9223372036854775808`, valIsMin, `-9223372036854775807`, `-9223372036854775808`, `9223372036854775807`},
		{`9223372036854775807`, `9223372036854775806`, valIsMax, `-9223372036854775808`, `9223372036854775807`},

		// Boolean
		{`true`, `false`, valIsMax, `false`, `true`},
		{`false`, valIsMin, `true`, `false`, `true`},

		// Floats
		{`3.14:::float`, `3.1399999999999997`, `3.1400000000000006`, `NaN`, `+Inf`},
		{`9.223372036854776e+18:::float`, `9.223372036854775e+18`, `9.223372036854778e+18`, `NaN`, `+Inf`},
		{`'NaN':::float`, valIsMin, `-Inf`, `NaN`, `+Inf`},
		{`-1.7976931348623157e+308:::float`, `-Inf`, `-1.7976931348623155e+308`, `NaN`, `+Inf`},
		{`1.7976931348623157e+308:::float`, `1.7976931348623155e+308`, `+Inf`, `NaN`, `+Inf`},

		// Decimal
		{`1.0:::decimal`, noPrev, noNext, `NaN`, `Infinity`},

		// Strings and byte arrays
		{`'':::string`, valIsMin, `e'\x00'`, `''`, noMax},
		{`e'\x00'`, noPrev, `e'\x00\x00'`, `''`, noMax},
		{`'abc':::string`, noPrev, `e'abc\x00'`, `''`, noMax},
		{`'':::bytes`, valIsMin, `'\x00'`, `'\x'`, noMax},
		{`'abc':::bytes`, noPrev, `'\x61626300'`, `'\x'`, noMax},

		// Dates
		{`'2006-01-02':::date`, `'2006-01-01'`, `'2006-01-03'`, `'-infinity'`, `'infinity'`},
		{`'0001-01-01':::date`, `'0001-12-31 BC'`, `'0001-01-02'`, `'-infinity'`, `'infinity'`},
		{`'4000-01-01 BC':::date`, `'4001-12-31 BC'`, `'4000-01-02 BC'`, `'-infinity'`, `'infinity'`},
		{`'2006-01-02 03:04:05.123123':::timestamp`,
			`'2006-01-02 03:04:05.123122+00:00'`, `'2006-01-02 03:04:05.123124+00:00'`, `'-4713-11-24 00:00:00+00:00'`, `'294276-12-31 23:59:59.999999+00:00'`},

		// Geospatial types
		{`'POINT(1.0 1.0)'::geometry`, noPrev, noNext, noMin, noMax},
		{`'POINT(1.0 1.0)'::geography`, noPrev, noNext, noMin, noMax},

		// Times
		{`'00:00:00':::time`, valIsMin, `'00:00:00.000001'`,
			`'00:00:00'`, `'24:00:00'`},
		{`'12:00:00':::time`, `'11:59:59.999999'`, `'12:00:00.000001'`,
			`'00:00:00'`, `'24:00:00'`},
		{`'24:00:00':::time`, `'23:59:59.999999'`, valIsMax, `'00:00:00'`, `'24:00:00'`},

		// Intervals
		{`'1 day':::interval`, noPrev, noNext,
			`'-768614336404564650 years -8 mons -9223372036854775808 days -2562047:47:16.854775'`,
			`'768614336404564650 years 7 mons 9223372036854775807 days 2562047:47:16.854775'`},
		// Max interval: we use Postgres syntax, because Go doesn't accept
		// months/days and ISO8601 doesn't accept nanoseconds.
		{`'9223372036854775807 months 9223372036854775807 days ` +
			`2562047 hours 47 minutes 16 seconds 854775 us':::interval`,
			noPrev, valIsMax,
			`'-768614336404564650 years -8 mons -9223372036854775808 days -2562047:47:16.854775'`,
			`'768614336404564650 years 7 mons 9223372036854775807 days 2562047:47:16.854775'`},
		{`'-9223372036854775808 months -9223372036854775808 days ` +
			`-2562047 h -47 m -16 s -854775 us':::interval`,
			valIsMin, noNext,
			`'-768614336404564650 years -8 mons -9223372036854775808 days -2562047:47:16.854775'`,
			`'768614336404564650 years 7 mons 9223372036854775807 days 2562047:47:16.854775'`},

		// UUIDs
		{`'ffffffff-ffff-ffff-ffff-ffffffffffff'::uuid`, `'ffffffff-ffff-ffff-ffff-fffffffffffe'`, valIsMax,
			`'00000000-0000-0000-0000-000000000000'`, `'ffffffff-ffff-ffff-ffff-ffffffffffff'`},
		{`'00000000-0000-0000-0000-000000000000'::uuid`, valIsMin, `'00000000-0000-0000-0000-000000000001'`,
			`'00000000-0000-0000-0000-000000000000'`, `'ffffffff-ffff-ffff-ffff-ffffffffffff'`},
		{`'ffffffff-ffff-ffff-0000-000000000000'::uuid`, `'ffffffff-ffff-fffe-ffff-ffffffffffff'`,
			`'ffffffff-ffff-ffff-0000-000000000001'`, `'00000000-0000-0000-0000-000000000000'`,
			`'ffffffff-ffff-ffff-ffff-ffffffffffff'`},
		{`'00000000-0000-0000-ffff-ffffffffffff'::uuid`, `'00000000-0000-0000-ffff-fffffffffffe'`,
			`'00000000-0000-0001-0000-000000000000'`, `'00000000-0000-0000-0000-000000000000'`,
			`'ffffffff-ffff-ffff-ffff-ffffffffffff'`},

		// INETs
		{`'0.0.0.0'::inet`, `'255.255.255.255/31'`, `'0.0.0.1'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'0.0.0.0/0'::inet`, noPrev, `'0.0.0.1/0'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'192.168.255.255'::inet`, `'192.168.255.254'`, `'192.169.0.0'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'127.0.0.1'::inet`, `'127.0.0.0'`, `'127.0.0.2'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'192.168.0.1/20'::inet`, `'192.168.0.0/20'`, `'192.168.0.2/20'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'192.168.0.0/20'::inet`, `'192.167.255.255/20'`, `'192.168.0.1/20'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'::ffff:1.2.3.4'::inet`, `'::ffff:1.2.3.3'`,
			`'::ffff:1.2.3.5'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'::0'::inet`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127'`, `'::1'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'::0/0'::inet`, `'255.255.255.255'`, `'::1/0'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'255.255.255.255/32'::inet`, `'255.255.255.254'`, `'::/0'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'255.255.255.255/16'::inet`, `'255.255.255.254/16'`, `'0.0.0.0/17'`, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},
		{`'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128'::inet`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe'`, noNext, `'0.0.0.0/0'`, `'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff'`},

		// NULL
		{`NULL`, valIsMin, valIsMax, `NULL`, `NULL`},

		// Tuples
		{`row()`, valIsMin, valIsMax, `()`, `()`},

		{`(NULL,)`, valIsMin, valIsMax, `(NULL,)`, `(NULL,)`},

		{`(true,)`, `(false,)`, valIsMax, `(false,)`, `(true,)`},
		{`(false,)`, valIsMin, `(true,)`, `(false,)`, `(true,)`},

		{`(true, false, false)`, `(false, true, true)`, `(true, false, true)`,
			`(false, false, false)`, `(true, true, true)`},
		{`(false, true, true)`, `(false, true, false)`, `(true, NULL, NULL)`,
			`(false, false, false)`, `(true, true, true)`},

		{`(0, 0)`, `(0, -1)`, `(0, 1)`,
			`(-9223372036854775808, -9223372036854775808)`,
			`(9223372036854775807, 9223372036854775807)`},

		{`(0, 9223372036854775807)`,
			`(0, 9223372036854775806)`, `(1, NULL)`,
			`(-9223372036854775808, -9223372036854775808)`,
			`(9223372036854775807, 9223372036854775807)`},
		{`(9223372036854775807, 9223372036854775807)`,
			`(9223372036854775807, 9223372036854775806)`, valIsMax,
			`(-9223372036854775808, -9223372036854775808)`,
			`(9223372036854775807, 9223372036854775807)`},

		{`(0, 0:::decimal)`, noPrev, noNext,
			`(-9223372036854775808, NaN)`,
			`(9223372036854775807, Infinity)`},
		{`(0:::decimal, 0)`, `(0, -1)`, `(0, 1)`,
			`(NaN, -9223372036854775808)`,
			`(Infinity, 9223372036854775807)`},

		{`(10, '')`, noPrev, `(10, e'\x00')`,
			`(-9223372036854775808, '')`, noMax},
		{`(-9223372036854775808, '')`, valIsMin, `(-9223372036854775808, e'\x00')`,
			`(-9223372036854775808, '')`, noMax},
		{`(-9223372036854775808, 'abc')`, noPrev, `(-9223372036854775808, e'abc\x00')`,
			`(-9223372036854775808, '')`, noMax},

		{`(10, NULL)`, `(9, NULL)`, `(11, NULL)`,
			`(-9223372036854775808, NULL)`, `(9223372036854775807, NULL)`},
		{`(NULL, 10)`, `(NULL, 9)`, `(NULL, 11)`,
			`(NULL, -9223372036854775808)`, `(NULL, 9223372036854775807)`},

		{`(true, NULL, false)`, `(false, NULL, true)`, `(true, NULL, true)`,
			`(false, NULL, false)`, `(true, NULL, true)`},
		{`(false, NULL, true)`, `(false, NULL, false)`, `(true, NULL, NULL)`,
			`(false, NULL, false)`, `(true, NULL, true)`},

		{`((true,), (false,))`, `((false,), (true,))`, `((true,), (true,))`,
			`((false,), (false,))`, `((true,), (true,))`},
		{`((false,), (true,))`, `((false,), (false,))`, `((true,), NULL)`,
			`((false,), (false,))`, `((true,), (true,))`},

		// Arrays

		{`'{}'::INT[]`, valIsMin, `ARRAY[NULL]`, `ARRAY[]`, noMax},

		{`array[NULL]`, noPrev, `ARRAY[NULL,NULL]`, `ARRAY[]`, noMax},
		{`array[true]`, noPrev, `ARRAY[true,NULL]`, `ARRAY[]`, noMax},

		// Mixed tuple/array datums.
		{`(ARRAY[true], (true,))`, `(ARRAY[true], (false,))`, `(ARRAY[true,NULL], NULL)`,
			`(ARRAY[], (false,))`, noMax},
		{`((false,), ARRAY[true])`, noPrev, `((false,), ARRAY[true,NULL])`,
			`((false,), ARRAY[])`, noMax},
	}
	ctx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
	for _, td := range testData {
		expr := prepareExpr(t, td.datumExpr)

		d := expr.(tree.Datum)
		prevVal, hasPrev := d.Prev(ctx)
		nextVal, hasNext := d.Next(ctx)
		if td.prev == noPrev {
			if hasPrev {
				if !d.IsMin(ctx) {
					t.Errorf("%s: value should not have a prev, yet hasPrev true and IsMin() false (expected (!hasPrev || IsMin()))", td.datumExpr)
				}
			}
		} else {
			if !hasPrev && td.prev != valIsMin {
				t.Errorf("%s: hasPrev: got false, expected true", td.datumExpr)
				continue
			}
			isMin := d.IsMin(ctx)
			if isMin != (td.prev == valIsMin) {
				t.Errorf("%s: IsMin() %v, expected %v", td.datumExpr, isMin, (td.prev == valIsMin))
				continue
			}
			if !isMin {
				dPrev := prevVal.String()
				if dPrev != td.prev {
					t.Errorf("%s: Prev(): got %s, expected %s", td.datumExpr, dPrev, td.prev)
				}
			}
		}
		if td.next == noNext {
			if hasNext {
				if !d.IsMax(ctx) {
					t.Errorf("%s: value should not have a next, yet hasNext true and IsMax() false (expected (!hasNext || IsMax()))", td.datumExpr)
				}
			}
		} else {
			if !hasNext && td.next != valIsMax {
				t.Errorf("%s: HasNext(): got false, expected true", td.datumExpr)
				continue
			}
			isMax := d.IsMax(ctx)
			if isMax != (td.next == valIsMax) {
				t.Errorf("%s: IsMax() %v, expected %v", td.datumExpr, isMax, (td.next == valIsMax))
				continue
			}
			if !isMax {
				dNext := nextVal.String()
				if dNext != td.next {
					t.Errorf("%s: Next(): got %s, expected %s", td.datumExpr, dNext, td.next)
				}
			}
		}

		minVal, hasMin := d.Min(ctx)
		maxVal, hasMax := d.Max(ctx)

		if td.min == noMin {
			if hasMin {
				t.Errorf("%s: hasMin true, expected false", td.datumExpr)
			}
		} else {
			dMin := minVal.String()
			if dMin != td.min {
				t.Errorf("%s: min(): got %s, expected %s", td.datumExpr, dMin, td.min)
			}
		}
		if td.max == noMax {
			if hasMax {
				t.Errorf("%s: hasMax true, expected false", td.datumExpr)
			}
		} else {
			dMax := maxVal.String()
			if dMax != td.max {
				t.Errorf("%s: max(): got %s, expected %s", td.datumExpr, dMax, td.max)
			}
		}
	}
}

func TestDFloatCompare(t *testing.T) {
	defer leaktest.AfterTest(t)()
	values := []tree.Datum{tree.DNull}
	for _, x := range []float64{math.NaN(), math.Inf(-1), -1, 0, 1, math.Inf(1)} {
		values = append(values, tree.NewDFloat(tree.DFloat(x)))
	}
	for i, x := range values {
		for j, y := range values {
			expected := 0
			if i < j {
				expected = -1
			} else if i > j {
				expected = 1
			}
			evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
			defer evalCtx.Stop(context.Background())
			got := x.Compare(evalCtx, y)
			if got != expected {
				t.Errorf("comparing DFloats %s and %s: expected %d, got %d", x, y, expected, got)
			}
		}
	}
}

// TestParseDIntervalWithTypeMetadata tests that the additional features available
// to tree.ParseDIntervalWithTypeMetadata beyond those in tree.ParseDInterval behave as expected.
func TestParseDIntervalWithTypeMetadata(t *testing.T) {
	defer leaktest.AfterTest(t)()

	var (
		second = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_SECOND,
			},
		}
		minute = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_MINUTE,
			},
		}
		hour = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_HOUR,
			},
		}
		day = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_DAY,
			},
		}
		month = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_MONTH,
			},
		}
		year = types.IntervalTypeMetadata{
			DurationField: types.IntervalDurationField{
				DurationType: types.IntervalDurationType_YEAR,
			},
		}
	)

	testData := []struct {
		str      string
		dtype    types.IntervalTypeMetadata
		expected string
	}{
		// Test cases for raw numbers with fields
		{"5", second, "5s"},
		{"5.8", second, "5.8s"},
		{"5", minute, "5m"},
		{"5.8", minute, "5m"},
		{"5", hour, "5h"},
		{"5.8", hour, "5h"},
		{"5", day, "5 day"},
		{"5.8", day, "5 day"},
		{"5", month, "5 month"},
		{"5.8", month, "5 month"},
		{"5", year, "5 year"},
		{"5.8", year, "5 year"},
		// Test cases for truncation based on fields
		{"1-2 3 4:56:07", second, "1-2 3 4:56:07"},
		{"1-2 3 4:56:07", minute, "1-2 3 4:56:00"},
		{"1-2 3 4:56:07", hour, "1-2 3 4:00:00"},
		{"1-2 3 4:56:07", day, "1-2 3 0:"},
		{"1-2 3 4:56:07", month, "1-2 0 0:"},
		{"1-2 3 4:56:07", year, "1 year"},
	}
	for _, td := range testData {
		actual, err := tree.ParseDIntervalWithTypeMetadata(td.str, td.dtype)
		if err != nil {
			t.Errorf("unexpected error while parsing INTERVAL %s %#v: %s", td.str, td.dtype, err)
			continue
		}
		expected, err := tree.ParseDInterval(td.expected)
		if err != nil {
			t.Errorf("unexpected error while parsing expected value INTERVAL %s: %s", td.expected, err)
			continue
		}
		evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
		defer evalCtx.Stop(context.Background())
		if expected.Compare(evalCtx, actual) != 0 {
			t.Errorf("INTERVAL %s %#v: got %s, expected %s", td.str, td.dtype, actual, expected)
		}
	}
}

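// TestParseDDate verifies that DATE parsing accepts, and ignores, a trailing
// time zone offset written in a variety of formats.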
func TestParseDDate(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testData := []struct {
		str      string
		expected string
	}{
		{"2017-03-03 -01:00:00", "2017-03-03"},
		{"2017-03-03 -1:0:0", "2017-03-03"},
		{"2017-03-03 -01:00", "2017-03-03"},
		{"2017-03-03 -01", "2017-03-03"},
		{"2017-03-03 -010000", "2017-03-03"},
		{"2017-03-03 -0100", "2017-03-03"},
		{"2017-03-03 -1", "2017-03-03"},
		{"2017-03-03", "2017-03-03"},
		{"2017-3-3 -01:00:00", "2017-03-03"},
		{"2017-3-3 -1:0:0", "2017-03-03"},
		{"2017-3-3 -01:00", "2017-03-03"},
		{"2017-3-3 -01", "2017-03-03"},
		{"2017-3-3 -010000", "2017-03-03"},
		{"2017-3-3 -0100", "2017-03-03"},
		{"2017-3-3 -1", "2017-03-03"},
		{"2017-3-3", "2017-03-03"},
	}
	for _, td := range testData {
		actual, err := tree.ParseDDate(nil, td.str)
		if err != nil {
			t.Errorf("unexpected error while parsing DATE %s: %s", td.str, err)
			continue
		}
		expected, err := tree.ParseDDate(nil, td.expected)
		if err != nil {
			t.Errorf("unexpected error while parsing expected value DATE %s: %s", td.expected, err)
			continue
		}
		evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
		defer evalCtx.Stop(context.Background())
		if expected.Compare(evalCtx, actual) != 0 {
			t.Errorf("DATE %s: got %s, expected %s", td.str, actual, expected)
		}
	}
}

func TestParseDBool(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testData := []struct {
		str      string
		expected *tree.DBool
		err      bool
	}{
		{str: "t", expected: tree.DBoolTrue},
		{str: "tr", expected: tree.DBoolTrue},
		{str: "tru", expected: tree.DBoolTrue},
		{str: "true", expected: tree.DBoolTrue},
		{str: "tr", expected: tree.DBoolTrue},
		{str: "TRUE", expected: tree.DBoolTrue},
		{str: "tRUe", expected: tree.DBoolTrue},
		{str: " tRUe ", expected: tree.DBoolTrue},
		{str: " tR ", expected: tree.DBoolTrue},
		{str: "on", expected: tree.DBoolTrue},
		{str: "On", expected: tree.DBoolTrue},
		{str: "oN", expected: tree.DBoolTrue},
		{str: "ON", expected: tree.DBoolTrue},
		{str: "1", expected: tree.DBoolTrue},
		{str: "yes", expected: tree.DBoolTrue},
		{str: "ye", expected: tree.DBoolTrue},
		{str: "y", expected: tree.DBoolTrue},

		{str: "false", expected: tree.DBoolFalse},
		{str: "FALSE", expected: tree.DBoolFalse},
		{str: "fALse", expected: tree.DBoolFalse},
		{str: "f", expected: tree.DBoolFalse},
		{str: "off", expected: tree.DBoolFalse},
		{str: "Off", expected: tree.DBoolFalse},
		{str: "oFF", expected: tree.DBoolFalse},
		{str: "OFF", expected: tree.DBoolFalse},
		{str: "0", expected: tree.DBoolFalse},

		{str: "foo", err: true},
		{str: "tr ue", err: true},
		{str: "o", err: true},
		{str: "", err: true},
		{str: " ", err: true},
		{str: " ", err: true},
	}

	for _, td := range testData {
		t.Run(td.str, func(t *testing.T) {
			result, err := tree.ParseDBool(td.str)
			if td.err {
				if err == nil {
					t.Fatalf("expected parsing %v to error, got %v", td.str, result)
				}
				return
			}
			if err != nil {
				t.Fatalf("expected parsing %v to be %s, got error: %s", td.str, td.expected, err)
			}
			if *td.expected != *result {
				t.Fatalf("expected parsing %v to be %s, got %s", td.str, td.expected, result)
			}
		})
	}
}

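// TestParseDTime verifies TIME parsing, including whitespace trimming,
// precision handling, and the special 24:00 value.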
func TestParseDTime(t *testing.T) {
	defer leaktest.AfterTest(t)()
	// Since ParseDTime mostly delegates parsing logic to ParseDTimestamp, we only test a subset of
	// the timestamp test cases.
	testData := []struct {
		str       string
		precision time.Duration
		expected  timeofday.TimeOfDay
	}{
		{" 04:05:06 ", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"04:05:06", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"04:05:06.000001", time.Microsecond, timeofday.New(4, 5, 6, 1)},
		{"04:05:06.000001", time.Second, timeofday.New(4, 5, 6, 0)},
		{"04:05:06-07", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"0000-01-01 04:05:06", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"2001-01-01 04:05:06", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"4:5:6", time.Microsecond, timeofday.New(4, 5, 6, 0)},
		{"24:00:00", time.Microsecond, timeofday.Time2400},
		{"24:00:00.000", time.Microsecond, timeofday.Time2400},
		{"24:00:00.000000", time.Microsecond, timeofday.Time2400},
		{"0000-01-01T24:00:00", time.Microsecond, timeofday.Time2400},
		{"0000-01-01T24:00:00.0", time.Microsecond, timeofday.Time2400},
		{"0000-01-01 24:00:00", time.Microsecond, timeofday.Time2400},
		{"0000-01-01 24:00:00.0", time.Microsecond, timeofday.Time2400},
		{" 24:00:00.0", time.Microsecond, timeofday.Time2400},
		{" 24:00:00.0 ", time.Microsecond, timeofday.Time2400},
	}
	for _, td := range testData {
		actual, err := tree.ParseDTime(nil, td.str, td.precision)
		if err != nil {
			t.Errorf("unexpected error while parsing TIME %s: %s", td.str, err)
			continue
		}
		if *actual != tree.DTime(td.expected) {
			t.Errorf("TIME %s: got %s, expected %s", td.str, actual, td.expected)
		}
	}
}

func TestParseDTimeError(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testData := []string{
		"",
		"foo",
		"01",
	}
	for _, s := range testData {
		actual, _ := tree.ParseDTime(nil, s, time.Microsecond)
		if actual != nil {
			t.Errorf("TIME %s: got %s, expected error", s, actual)
		}
	}
}

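// TestParseDTimestamp verifies fractional-second parsing and that time zone
// offsets are ignored when no location is supplied.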
func TestParseDTimestamp(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testData := []struct {
		str      string
		expected time.Time
	}{
		{"2001-02-03", time.Date(2001, time.February, 3, 0, 0, 0, 0, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06", time.Date(2001, time.February, 3, 4, 5, 6, 0, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.000001", time.Date(2001, time.February, 3, 4, 5, 6, 1000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.00001", time.Date(2001, time.February, 3, 4, 5, 6, 10000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.0001", time.Date(2001, time.February, 3, 4, 5, 6, 100000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.001", time.Date(2001, time.February, 3, 4, 5, 6, 1000000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.01", time.Date(2001, time.February, 3, 4, 5, 6, 10000000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.1", time.Date(2001, time.February, 3, 4, 5, 6, 100000000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.12", time.Date(2001, time.February, 3, 4, 5, 6, 120000000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.123", time.Date(2001, time.February, 3, 4, 5, 6, 123000000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.1234", time.Date(2001, time.February, 3, 4, 5, 6, 123400000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.12345", time.Date(2001, time.February, 3, 4, 5, 6, 123450000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.123456", time.Date(2001, time.February, 3, 4, 5, 6, 123456000, time.FixedZone("", 0))},
		{"2001-02-03 04:05:06.123-07", time.Date(2001, time.February, 3, 4, 5, 6, 123000000,
			time.FixedZone("", 0))},
		{"2001-02-03 04:05:06-07", time.Date(2001, time.February, 3, 4, 5, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 04:05:06-07:42", time.Date(2001, time.February, 3, 4, 5, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 04:05:06-07:30:09", time.Date(2001, time.February, 3, 4, 5, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 04:05:06+07", time.Date(2001, time.February, 3, 4, 5, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 04:0:06", time.Date(2001, time.February, 3, 4, 0, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 0:0:06", time.Date(2001, time.February, 3, 0, 0, 6, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 4:05:0", time.Date(2001, time.February, 3, 4, 5, 0, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 4:05:0-07:0:00", time.Date(2001, time.February, 3, 4, 5, 0, 0,
			time.FixedZone("", 0))},
		{"2001-02-03 4:0:6 +3:0:0", time.Date(2001, time.February, 3, 4, 0, 6, 0,
			time.FixedZone("", 0))},
	}
	for _, td := range testData {
		actual, err := tree.ParseDTimestamp(nil, td.str, time.Nanosecond)
		if err != nil {
			t.Errorf("unexpected error while parsing TIMESTAMP %s: %s", td.str, err)
			continue
		}
		if !actual.Time.Equal(td.expected) {
			t.Errorf("TIMESTAMP %s: got %s, expected %s", td.str, actual, td.expected)
		}
	}
}

func TestMakeDJSON(t *testing.T) {
	defer leaktest.AfterTest(t)()
	j1, err := tree.MakeDJSON(1)
	if err != nil {
		t.Fatal(err)
	}
	j2, err := tree.MakeDJSON(2)
	if err != nil {
		t.Fatal(err)
	}
	if j1.Compare(tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings()), j2) != -1 {
		t.Fatal("expected JSON 1 < 2")
	}
}

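// TestDTimeTZ verifies comparison, Prev/Next, and IsMin/IsMax behavior for
// TIMETZ values across several time zone offsets.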
func TestDTimeTZ(t *testing.T) {
	defer leaktest.AfterTest(t)()

	ctx := &tree.EvalContext{
		SessionData: &sessiondata.SessionData{
			DataConversion: sessiondata.DataConversionConfig{
				Location: time.UTC,
			},
		},
	}

	maxTime, err := tree.ParseDTimeTZ(ctx, "24:00:00-1559", time.Microsecond)
	require.NoError(t, err)
	minTime, err := tree.ParseDTimeTZ(ctx, "00:00:00+1559", time.Microsecond)
	require.NoError(t, err)

	// These are all the same UTC time equivalents.
	utcTime, err := tree.ParseDTimeTZ(ctx, "11:14:15+0", time.Microsecond)
	require.NoError(t, err)
	sydneyTime, err := tree.ParseDTimeTZ(ctx, "21:14:15+10", time.Microsecond)
	require.NoError(t, err)

	// No daylight savings in Hawaii!
	hawaiiZone, err := time.LoadLocation("Pacific/Honolulu")
	require.NoError(t, err)
	hawaiiTime := tree.NewDTimeTZFromLocation(timeofday.New(1, 14, 15, 0), hawaiiZone)

	weirdTimeZone := tree.NewDTimeTZFromOffset(timeofday.New(10, 0, 0, 0), -((5 * 60 * 60) + 30*60 + 15))

	testCases := []struct {
		t           *tree.DTimeTZ
		largerThan  []tree.Datum
		smallerThan []tree.Datum
		equalTo     []tree.Datum
		isMax       bool
		isMin       bool
	}{
		{
			t:           weirdTimeZone,
			largerThan:  []tree.Datum{minTime, tree.DNull},
			smallerThan: []tree.Datum{maxTime},
			equalTo:     []tree.Datum{weirdTimeZone},
			isMax:       false,
			isMin:       false,
		},
		{
			t:           utcTime,
			largerThan:  []tree.Datum{minTime, sydneyTime, tree.DNull},
			smallerThan: []tree.Datum{maxTime, hawaiiTime},
			equalTo:     []tree.Datum{utcTime},
			isMax:       false,
			isMin:       false,
		},
		{
			t:           sydneyTime,
			largerThan:  []tree.Datum{minTime, tree.DNull},
			smallerThan: []tree.Datum{maxTime, utcTime, hawaiiTime},
			equalTo:     []tree.Datum{sydneyTime},
			isMax:       false,
			isMin:       false,
		},
		{
			t:           hawaiiTime,
			largerThan:  []tree.Datum{minTime, utcTime, sydneyTime, tree.DNull},
			smallerThan: []tree.Datum{maxTime},
			equalTo:     []tree.Datum{hawaiiTime},
			isMax:       false,
			isMin:       false,
		},
		{
			t:           minTime,
			largerThan:  []tree.Datum{tree.DNull},
			smallerThan: []tree.Datum{maxTime, utcTime, sydneyTime, hawaiiTime},
			equalTo:     []tree.Datum{minTime},
			isMax:       false,
			isMin:       true,
		},
		{
			t:           maxTime,
			largerThan:  []tree.Datum{minTime, utcTime, sydneyTime, hawaiiTime, tree.DNull},
			smallerThan: []tree.Datum{},
			equalTo:     []tree.Datum{maxTime},
			isMax:       true,
			isMin:       false,
		},
	}
	for i, tc := range testCases {
		t.Run(fmt.Sprintf("#%d %s", i, tc.t.String()), func(t *testing.T) {
			var largerThan []tree.Datum
			prev, ok := tc.t.Prev(ctx)
			if !tc.isMin {
				assert.True(t, ok)
				largerThan = append(largerThan, prev)
			} else {
				assert.False(t, ok)
			}
			for _, largerThan := range append(largerThan, tc.largerThan...) {
				assert.Equal(t, 1, tc.t.Compare(ctx, largerThan), "%s > %s", tc.t.String(), largerThan.String())
			}

			var smallerThan []tree.Datum
			next, ok := tc.t.Next(ctx)
			if !tc.isMax {
				assert.True(t, ok)
				smallerThan = append(smallerThan, next)
			} else {
				assert.False(t, ok)
			}
			for _, smallerThan := range append(smallerThan, tc.smallerThan...) {
				assert.Equal(t, -1, tc.t.Compare(ctx, smallerThan), "%s < %s", tc.t.String(), smallerThan.String())
			}

			for _, equalTo := range tc.equalTo {
				assert.Equal(t, 0, tc.t.Compare(ctx, equalTo), "%s = %s", tc.t.String(), equalTo.String())
			}

			assert.Equal(t, tc.isMax, tc.t.IsMax(ctx))
			assert.Equal(t, tc.isMin, tc.t.IsMin(ctx))
		})
	}
}

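// TestIsDistinctFrom verifies IS DISTINCT FROM semantics for lists of datums,
// including NULL handling.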
func TestIsDistinctFrom(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testData := []struct {
		a        string // comma separated list of strings, `NULL` is converted to a NULL
		b        string // same as a
		expected bool
	}{
		{"a", "a", false},
		{"a", "b", true},
		{"b", "b", false},
		{"a,a", "a,a", false},
		{"a,a", "a,b", true},
		{"a,a", "b,a", true},
		{"a,a,a", "a,a,a", false},
		{"a,a,a", "a,a,b", true},
		{"a,a,a", "a,b,a", true},
		{"a,a,a", "a,b,b", true},
		{"a,a,a", "b,a,a", true},
		{"a,a,a", "b,a,b", true},
		{"a,a,a", "b,b,a", true},
		{"a,a,a", "b,b,b", true},
		{"NULL", "NULL", false},
		{"a", "NULL", true},
		{"a,a", "a,NULL", true},
		{"a,a", "NULL,a", true},
		{"a,a", "NULL,NULL", true},
		{"a,NULL", "a,a", true},
		{"a,NULL", "a,NULL", false},
		{"a,NULL", "NULL,a", true},
		{"a,NULL", "NULL,NULL", true},
		{"NULL,a", "a,a", true},
		{"NULL,a", "a,NULL", true},
		{"NULL,a", "NULL,a", false},
		{"NULL,a", "NULL,NULL", true},
		{"NULL,NULL", "a,a", true},
		{"NULL,NULL", "a,NULL", true},
		{"NULL,NULL", "NULL,a", true},
		{"NULL,NULL", "NULL,NULL", false},
		{"a,a,a", "a,a,NULL", true},
		{"a,a,a", "a,NULL,a", true},
		{"a,a,a", "a,NULL,NULL", true},
		{"a,a,a", "NULL,a,a", true},
		{"a,a,a", "NULL,a,NULL", true},
		{"a,a,a", "NULL,NULL,a", true},
		{"a,a,a", "NULL,NULL,NULL", true},
		{"a,NULL,a", "a,a,a", true},
		{"a,NULL,a", "a,a,NULL", true},
		{"a,NULL,a", "a,NULL,a", false},
		{"a,NULL,a", "a,NULL,NULL", true},
		{"a,NULL,a", "NULL,a,a", true},
		{"a,NULL,a", "NULL,a,NULL", true},
		{"a,NULL,a", "NULL,NULL,a", true},
		{"a,NULL,a", "NULL,NULL,NULL", true},
		{"NULL,a,NULL", "a,a,a", true},
		{"NULL,a,NULL", "a,a,NULL", true},
		{"NULL,a,NULL", "a,NULL,a", true},
		{"NULL,a,NULL", "a,NULL,NULL", true},
		{"NULL,a,NULL", "NULL,a,a", true},
		{"NULL,a,NULL", "NULL,a,NULL", false},
		{"NULL,a,NULL", "NULL,NULL,a", true},
		{"NULL,a,NULL", "NULL,NULL,NULL", true},
		{"NULL,NULL,NULL", "a,a,a", true},
		{"NULL,NULL,NULL", "a,a,NULL", true},
		{"NULL,NULL,NULL", "a,NULL,a", true},
		{"NULL,NULL,NULL", "a,NULL,NULL", true},
		{"NULL,NULL,NULL", "NULL,a,a", true},
		{"NULL,NULL,NULL", "NULL,a,NULL", true},
		{"NULL,NULL,NULL", "NULL,NULL,a", true},
		{"NULL,NULL,NULL", "NULL,NULL,NULL", false},
	}
	convert := func(s string) tree.Datums {
		splits := strings.Split(s, ",")
		result := make(tree.Datums, len(splits))
		for i, value := range splits {
			if value == "NULL" {
				result[i] = tree.DNull
				continue
			}
			result[i] = tree.NewDString(value)
		}
		return result
	}
	for _, td := range testData {
		t.Run(fmt.Sprintf("%s to %s", td.a, td.b), func(t *testing.T) {
			datumsA := convert(td.a)
			datumsB := convert(td.b)
			if e, a := td.expected, datumsA.IsDistinctFrom(&tree.EvalContext{}, datumsB); e != a {
				if e {
					t.Errorf("expected %s to be distinct from %s, but got %t", datumsA, datumsB, e)
				} else {
					t.Errorf("expected %s to not be distinct from %s, but got %t", datumsA, datumsB, e)
				}
			}
		})
	}
}

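// TestAllTypesAsJSON verifies that a sample datum of every scalar type can be
// converted to JSON.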
func TestAllTypesAsJSON(t *testing.T) {
	defer leaktest.AfterTest(t)()
	for _, typ := range types.Scalar {
		d := tree.SampleDatum(typ)
		_, err := tree.AsJSON(d, time.UTC)
		if err != nil {
			t.Errorf("couldn't convert %s to JSON: %s", d, err)
		}
	}
}

// Test default values of many different datum types.
func TestNewDefaultDatum(t *testing.T) {
	defer leaktest.AfterTest(t)()

	evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
	defer evalCtx.Stop(context.Background())

	testCases := []struct {
		t        *types.T
		expected string
	}{
		{t: types.Bool, expected: "false"},
		{t: types.Int, expected: "0:::INT8"},
		{t: types.Int2, expected: "0:::INT8"},
		{t: types.Int4, expected: "0:::INT8"},
		{t: types.Float, expected: "0.0:::FLOAT8"},
		{t: types.Float4, expected: "0.0:::FLOAT8"},
		{t: types.Decimal, expected: "0:::DECIMAL"},
		{t: types.MakeDecimal(10, 5), expected: "0:::DECIMAL"},
		{t: types.Date, expected: "'2000-01-01':::DATE"},
		{t: types.Timestamp, expected: "'0001-01-01 00:00:00+00:00':::TIMESTAMP"},
		{t: types.Interval, expected: "'00:00:00':::INTERVAL"},
		{t: types.String, expected: "'':::STRING"},
		{t: types.MakeChar(3), expected: "'':::STRING"},
		{t: types.Bytes, expected: "'\\x':::BYTES"},
		{t: types.TimestampTZ, expected: "'0001-01-01 00:00:00+00:00':::TIMESTAMPTZ"},
		{t: types.MakeCollatedString(types.MakeVarChar(10), "de"), expected: "'' COLLATE de"},
		{t: types.MakeCollatedString(types.VarChar, "en_US"), expected: "'' COLLATE en_US"},
		{t: types.Oid, expected: "26:::OID"},
		{t: types.RegClass, expected: "crdb_internal.create_regclass(2205,'regclass'):::REGCLASS"},
		{t: types.Unknown, expected: "NULL"},
		{t: types.Uuid, expected: "'00000000-0000-0000-0000-000000000000':::UUID"},
		{t: types.MakeArray(types.Int), expected: "ARRAY[]:::INT8[]"},
		{t: types.MakeArray(types.MakeArray(types.String)), expected: "ARRAY[]:::STRING[][]"},
		{t: types.OidVector, expected: "ARRAY[]:::OID[]"},
		{t: types.INet, expected: "'0.0.0.0/0':::INET"},
		{t: types.Time, expected: "'00:00:00':::TIME"},
		{t: types.Jsonb, expected: "'null':::JSONB"},
		{t: types.TimeTZ, expected: "'00:00:00+00:00:00':::TIMETZ"},
		{t: types.MakeTuple([]*types.T{}), expected: "()"},
		{t: types.MakeTuple([]*types.T{types.Int, types.MakeChar(1)}), expected: "(0:::INT8, '':::STRING)"},
		{t: types.MakeTuple([]*types.T{types.OidVector, types.MakeTuple([]*types.T{types.Float})}), expected: "(ARRAY[]:::OID[], (0.0:::FLOAT8,))"},
		{t: types.VarBit, expected: "B''"},
		{t: types.MakeBit(5), expected: "B''"},
	}

	for i, tc := range testCases {
		t.Run(fmt.Sprintf("#%d %s", i, tc.t.SQLString()), func(t *testing.T) {
			datum, err := tree.NewDefaultDatum(evalCtx, tc.t)
			if err != nil {
				t.Errorf("unexpected error: %s", err)
			}

			actual := tree.AsStringWithFlags(datum, tree.FmtCheckEquivalence)
			if actual != tc.expected {
				t.Errorf("expected %s, got %s", tc.expected, actual)
			}
		})
	}
}