github.com/cockroachdb/cockroach@v20.2.0-alpha.1+incompatible/pkg/sql/opt/optbuilder/scalar.go

     1  // Copyright 2018 The Cockroach Authors.
     2  //
     3  // Use of this software is governed by the Business Source License
     4  // included in the file licenses/BSL.txt.
     5  //
     6  // As of the Change Date specified in that file, in accordance with
     7  // the Business Source License, use of this software will be governed
     8  // by the Apache License, Version 2.0, included in the file
     9  // licenses/APL.txt.
    10  
    11  package optbuilder
    12  
    13  import (
    14  	"context"
    15  	"fmt"
    16  
    17  	"github.com/cockroachdb/cockroach/pkg/server/telemetry"
    18  	"github.com/cockroachdb/cockroach/pkg/sql/opt"
    19  	"github.com/cockroachdb/cockroach/pkg/sql/opt/memo"
    20  	"github.com/cockroachdb/cockroach/pkg/sql/opt/norm"
    21  	"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
    22  	"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
    23  	"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
    24  	"github.com/cockroachdb/cockroach/pkg/sql/sqlbase"
    25  	"github.com/cockroachdb/cockroach/pkg/sql/sqltelemetry"
    26  	"github.com/cockroachdb/cockroach/pkg/sql/types"
    27  	"github.com/cockroachdb/cockroach/pkg/util/errorutil"
    28  	"github.com/cockroachdb/cockroach/pkg/util/errorutil/unimplemented"
    29  	"github.com/cockroachdb/cockroach/pkg/util/log"
    30  	"github.com/cockroachdb/errors"
    31  )
    32  
    33  // buildScalar builds a set of memo groups that represent the given scalar
    34  // expression. If outScope is not nil, then this is a projection context, and
    35  // the resulting memo group will be projected as the output column outCol.
    36  // Otherwise, the memo group is part of a larger expression that is not bound
    37  // to a column.
    38  //
    39  // colRefs is the set of columns referenced so far by the scalar expression
    40  // being built. If not nil, it is updated with any columns seen in
    41  // finishBuildScalarRef.
    42  //
    43  // See Builder.buildStmt for a description of the remaining input and return
    44  // values.
    45  func (b *Builder) buildScalar(
    46  	scalar tree.TypedExpr, inScope, outScope *scope, outCol *scopeColumn, colRefs *opt.ColSet,
    47  ) (out opt.ScalarExpr) {
    48  	// If we are in a grouping context and this expression corresponds to a
    49  	// GROUP BY expression, return a reference to the GROUP BY column.
    50  	// Note that GROUP BY columns cannot be reused inside an aggregate input
    51  	// expression (when inAgg=true) because the aggregate input expressions and
    52  	// grouping expressions are built as part of the same projection.
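	// For example (illustrative query), in SELECT k+1 FROM t GROUP BY k+1 the
	// projection k+1 matches an entry in groupStrs and is rewritten as a
	// reference to the existing grouping column instead of being rebuilt.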
    53  	inGroupingContext := inScope.inGroupingContext() && !inScope.inAgg &&
    54  		!inScope.groupby.buildingGroupingCols
    55  	if inGroupingContext {
    56  		// TODO(rytaft): This currently regenerates a string for each subexpression.
    57  		// Change this to generate the string once for the top-level expression and
    58  		// check the relevant slice for this subexpression.
    59  		if col, ok := inScope.groupby.groupStrs[symbolicExprStr(scalar)]; ok {
    60  			// We pass aggOutScope as the input scope because it contains all of
    61  			// the aggregates and grouping columns that are available for projection.
    62  			// finishBuildScalarRef wraps projected columns in a variable expression
    63  			// with a new column ID if they are not contained in the input scope, so
    64  			// passing in aggOutScope ensures we don't create new column IDs when not
    65  			// necessary.
    66  			return b.finishBuildScalarRef(col, inScope.groupby.aggOutScope, outScope, outCol, colRefs)
    67  		}
    68  	}
    69  
    70  	switch t := scalar.(type) {
    71  	case *scopeColumn:
    72  		if inGroupingContext {
    73  			// Non-grouping column was referenced. Note that a column that is part
    74  			// of a larger grouping expression would have been detected by the
    75  			// groupStrs checking code above.
    76  			// Normally this would be a "column must appear in the GROUP BY clause"
    77  			// error. The only cases where we allow this (for compatibility with
    78  			// Postgres) are when this column is an outer column (and therefore
    79  			// effectively constant) or when it is part of a table and we are
    80  			// already grouping on the entire PK of that table.
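			// For example (illustrative), SELECT k, v FROM t GROUP BY k is accepted
			// when k is the entire primary key of t, because v is then functionally
			// determined by the grouping columns.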
    81  			g := inScope.groupby
    82  			if !inScope.isOuterColumn(t.id) && !b.allowImplicitGroupingColumn(t.id, g) {
    83  				panic(newGroupingError(&t.name))
    84  			}
    85  
    86  			// We add a new grouping column; these show up both in aggInScope and
    87  			// aggOutScope.
    88  			//
    89  			// Note that normalization rules will trim down the list of grouping
    90  			// columns based on FDs, so this is only for the purposes of building a
    91  			// valid operator.
    92  			aggInCol := b.addColumn(g.aggInScope, "" /* alias */, t)
    93  			b.finishBuildScalarRef(t, inScope, g.aggInScope, aggInCol, nil)
    94  			g.groupStrs[symbolicExprStr(t)] = aggInCol
    95  
    96  			g.aggOutScope.appendColumn(aggInCol)
    97  
    98  			return b.finishBuildScalarRef(t, g.aggOutScope, outScope, outCol, colRefs)
    99  		}
   100  
   101  		return b.finishBuildScalarRef(t, inScope, outScope, outCol, colRefs)
   102  
   103  	case *aggregateInfo:
   104  		var aggOutScope *scope
   105  		if inScope.groupby != nil {
   106  			aggOutScope = inScope.groupby.aggOutScope
   107  		}
   108  		return b.finishBuildScalarRef(t.col, aggOutScope, outScope, outCol, colRefs)
   109  
   110  	case *windowInfo:
   111  		return b.finishBuildScalarRef(t.col, inScope, outScope, outCol, colRefs)
   112  
   113  	case *tree.AndExpr:
   114  		left := b.buildScalar(t.TypedLeft(), inScope, nil, nil, colRefs)
   115  		right := b.buildScalar(t.TypedRight(), inScope, nil, nil, colRefs)
   116  		out = b.factory.ConstructAnd(left, right)
   117  
   118  	case *tree.Array:
   119  		els := make(memo.ScalarListExpr, len(t.Exprs))
   120  		arrayType := t.ResolvedType()
   121  		elementType := arrayType.ArrayContents()
   122  		if err := types.CheckArrayElementType(elementType); err != nil {
   123  			panic(err)
   124  		}
   125  		for i := range t.Exprs {
   126  			texpr := t.Exprs[i].(tree.TypedExpr)
   127  			els[i] = b.buildScalar(texpr, inScope, nil, nil, colRefs)
   128  		}
   129  		out = b.factory.ConstructArray(els, arrayType)
   130  
   131  	case *tree.CollateExpr:
   132  		in := b.buildScalar(t.Expr.(tree.TypedExpr), inScope, nil, nil, colRefs)
   133  		out = b.factory.ConstructCollate(in, t.Locale)
   134  
   135  	case *tree.ArrayFlatten:
   136  		s := t.Subquery.(*subquery)
   137  
   138  		inCol := s.cols[0].id
   139  
   140  		// This looks kind of arbitrary and strange, because it is:
   141  		// We cannot array_agg over some types, but we can only decorrelate via array_agg.
   142  		// Thus, we reject a query that is correlated and over a type that we can't array_agg.
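		// For example (hypothetical), ARRAY(SELECT u.c FROM u WHERE u.k = t.k)
		// is rejected here if u.c's type has no array_agg overload, since the
		// correlated subquery could only be decorrelated via array_agg.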
   143  		typ := b.factory.Metadata().ColumnMeta(inCol).Type
   144  		if !s.outerCols.Empty() && !memo.AggregateOverloadExists(opt.ArrayAggOp, typ) {
   145  			panic(unimplementedWithIssueDetailf(35710, "", "can't execute a correlated ARRAY(...) over %s", typ))
   146  		}
   147  
   148  		if err := types.CheckArrayElementType(typ); err != nil {
   149  			panic(err)
   150  		}
   151  
   152  		// Perform correctness checks on the outer cols, update colRefs and
   153  		// b.subquery.outerCols.
   154  		b.checkSubqueryOuterCols(s.outerCols, inGroupingContext, inScope, colRefs)
   155  
   156  		subqueryPrivate := memo.SubqueryPrivate{
   157  			OriginalExpr: s.Subquery,
   158  			Ordering:     s.ordering,
   159  			RequestedCol: inCol,
   160  		}
   161  		out = b.factory.ConstructArrayFlatten(s.node, &subqueryPrivate)
   162  
   163  	case *tree.IndirectionExpr:
   164  		expr := b.buildScalar(t.Expr.(tree.TypedExpr), inScope, nil, nil, colRefs)
   165  
   166  		if len(t.Indirection) != 1 {
   167  			panic(unimplementedWithIssueDetailf(32552, "ind", "multidimensional indexing is not supported"))
   168  		}
   169  
   170  		subscript := t.Indirection[0]
   171  		if subscript.Slice {
   172  			panic(unimplementedWithIssueDetailf(32551, "", "array slicing is not supported"))
   173  		}
   174  
   175  		out = b.factory.ConstructIndirection(
   176  			expr,
   177  			b.buildScalar(subscript.Begin.(tree.TypedExpr), inScope, nil, nil, colRefs),
   178  		)
   179  
   180  	case *tree.IfErrExpr:
   181  		cond := b.buildScalar(t.Cond.(tree.TypedExpr), inScope, nil, nil, colRefs)
   182  
   183  		orElse := memo.EmptyScalarListExpr
   184  		if t.Else != nil {
   185  			orElse = memo.ScalarListExpr{
   186  				b.buildScalar(t.Else.(tree.TypedExpr), inScope, nil, nil, colRefs),
   187  			}
   188  		}
   189  
   190  		errCode := memo.EmptyScalarListExpr
   191  		if t.ErrCode != nil {
   192  			errCode = memo.ScalarListExpr{
   193  				b.buildScalar(t.ErrCode.(tree.TypedExpr), inScope, nil, nil, colRefs),
   194  			}
   195  		}
   196  
   197  		out = b.factory.ConstructIfErr(cond, orElse, errCode)
   198  
   199  	case *tree.BinaryExpr:
   200  		// It's possible for an overload to be selected that expects different
   201  		// types than the TypedExpr arguments return:
   202  		//
   203  		//   ARRAY[1, 2] || NULL
   204  		//
   205  		// This is a tricky case, because the type checker selects []int as the
   206  		// type of the right argument, but then types it as unknown. This causes
   207  		// issues for the execbuilder, which doesn't have enough information to
   208  		// select the right overload. The solution is to wrap any mismatched
   209  		// arguments with a CastExpr that preserves the static type.
   210  
   211  		left := tree.ReType(t.TypedLeft(), t.ResolvedBinOp().LeftType)
   212  		right := tree.ReType(t.TypedRight(), t.ResolvedBinOp().RightType)
   213  		out = b.constructBinary(t.Operator,
   214  			b.buildScalar(left, inScope, nil, nil, colRefs),
   215  			b.buildScalar(right, inScope, nil, nil, colRefs),
   216  			t.ResolvedType(),
   217  		)
   218  
   219  	case *tree.CaseExpr:
   220  		var input opt.ScalarExpr
   221  		if t.Expr != nil {
   222  			texpr := t.Expr.(tree.TypedExpr)
   223  			input = b.buildScalar(texpr, inScope, nil, nil, colRefs)
   224  		} else {
   225  			input = memo.TrueSingleton
   226  		}
   227  
   228  		whens := make(memo.ScalarListExpr, 0, len(t.Whens)+1)
   229  		for i := range t.Whens {
   230  			texpr := t.Whens[i].Cond.(tree.TypedExpr)
   231  			cond := b.buildScalar(texpr, inScope, nil, nil, colRefs)
   232  			texpr = t.Whens[i].Val.(tree.TypedExpr)
   233  			val := b.buildScalar(texpr, inScope, nil, nil, colRefs)
   234  			whens = append(whens, b.factory.ConstructWhen(cond, val))
   235  		}
   236  		// Add the ELSE expression to the end of whens as a raw scalar expression.
   237  		var orElse opt.ScalarExpr
   238  		if t.Else != nil {
   239  			texpr := t.Else.(tree.TypedExpr)
   240  			orElse = b.buildScalar(texpr, inScope, nil, nil, colRefs)
   241  		} else {
   242  			orElse = memo.NullSingleton
   243  		}
   244  		out = b.factory.ConstructCase(input, whens, orElse)
   245  
   246  	case *tree.CastExpr:
   247  		texpr := t.Expr.(tree.TypedExpr)
   248  		arg := b.buildScalar(texpr, inScope, nil, nil, colRefs)
   249  		out = b.factory.ConstructCast(arg, t.ResolvedType())
   250  
   251  	case *tree.CoalesceExpr:
   252  		args := make(memo.ScalarListExpr, len(t.Exprs))
   253  		for i := range args {
   254  			args[i] = b.buildScalar(t.TypedExprAt(i), inScope, nil, nil, colRefs)
   255  		}
   256  		out = b.factory.ConstructCoalesce(args)
   257  
   258  	case *tree.ColumnAccessExpr:
   259  		input := b.buildScalar(t.Expr.(tree.TypedExpr), inScope, nil, nil, colRefs)
   260  		out = b.factory.ConstructColumnAccess(input, memo.TupleOrdinal(t.ColIndex))
   261  
   262  	case *tree.ComparisonExpr:
   263  		if sub, ok := t.Right.(*subquery); ok && sub.isMultiRow() {
   264  			out, _ = b.buildMultiRowSubquery(t, inScope, colRefs)
   265  			// Perform correctness checks on the outer cols, update colRefs and
   266  			// b.subquery.outerCols.
   267  			b.checkSubqueryOuterCols(sub.outerCols, inGroupingContext, inScope, colRefs)
   268  		} else if b.hasSubOperator(t) {
   269  			// Cases where the RHS is a multi-row subquery were handled above, so this
   270  			// only handles explicit tuples and arrays.
   271  			out = b.buildAnyScalar(t, inScope, colRefs)
   272  		} else {
   273  			left := b.buildScalar(t.TypedLeft(), inScope, nil, nil, colRefs)
   274  			right := b.buildScalar(t.TypedRight(), inScope, nil, nil, colRefs)
   275  			out = b.constructComparison(t.Operator, left, right)
   276  		}
   277  
   278  	case *tree.DTuple:
   279  		els := make(memo.ScalarListExpr, len(t.D))
   280  		for i := range t.D {
   281  			els[i] = b.buildScalar(t.D[i], inScope, nil, nil, colRefs)
   282  		}
   283  		out = b.factory.ConstructTuple(els, t.ResolvedType())
   284  
   285  	case *tree.FuncExpr:
   286  		return b.buildFunction(t, inScope, outScope, outCol, colRefs)
   287  
   288  	case *tree.IfExpr:
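		// IF(cond, a, b) is built as CASE cond WHEN true THEN a ELSE b END.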
   289  		input := b.buildScalar(t.Cond.(tree.TypedExpr), inScope, nil, nil, colRefs)
   290  		ifTrue := b.buildScalar(t.True.(tree.TypedExpr), inScope, nil, nil, colRefs)
   291  		whens := memo.ScalarListExpr{b.factory.ConstructWhen(memo.TrueSingleton, ifTrue)}
   292  		orElse := b.buildScalar(t.Else.(tree.TypedExpr), inScope, nil, nil, colRefs)
   293  		out = b.factory.ConstructCase(input, whens, orElse)
   294  
   295  	case *tree.IndexedVar:
   296  		if t.Idx < 0 || t.Idx >= len(inScope.cols) {
   297  			panic(pgerror.Newf(pgcode.UndefinedColumn,
   298  				"invalid column ordinal: @%d", t.Idx+1))
   299  		}
   300  		out = b.factory.ConstructVariable(inScope.cols[t.Idx].id)
   301  
   302  	case *tree.NotExpr:
   303  		input := b.buildScalar(t.TypedInnerExpr(), inScope, nil, nil, colRefs)
   304  		out = b.factory.ConstructNot(input)
   305  
   306  	case *tree.IsNullExpr:
   307  		input := b.buildScalar(t.TypedInnerExpr(), inScope, nil, nil, colRefs)
   308  		if t.TypedInnerExpr().ResolvedType().Family() == types.TupleFamily {
   309  			out = b.factory.ConstructIsTupleNull(input)
   310  		} else {
   311  			out = b.factory.ConstructIs(input, memo.NullSingleton)
   312  		}
   313  
   314  	case *tree.IsNotNullExpr:
   315  		input := b.buildScalar(t.TypedInnerExpr(), inScope, nil, nil, colRefs)
   316  		if t.TypedInnerExpr().ResolvedType().Family() == types.TupleFamily {
   317  			out = b.factory.ConstructIsTupleNotNull(input)
   318  		} else {
   319  			out = b.factory.ConstructIsNot(input, memo.NullSingleton)
   320  		}
   321  
   322  	case *tree.NullIfExpr:
   323  		// Ensure that the type of the first expression matches the resolved type
   324  		// of the NULLIF expression so that type inference will be correct in the
   325  		// CASE expression constructed below. For example, the type of
   326  		// NULLIF(NULL, 0) should be int.
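		// NULLIF(e1, e2) is then built as CASE e1 WHEN e2 THEN NULL ELSE e1 END.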
   327  		expr1 := tree.ReType(t.Expr1.(tree.TypedExpr), t.ResolvedType())
   328  		input := b.buildScalar(expr1, inScope, nil, nil, colRefs)
   329  		cond := b.buildScalar(t.Expr2.(tree.TypedExpr), inScope, nil, nil, colRefs)
   330  		whens := memo.ScalarListExpr{b.factory.ConstructWhen(cond, memo.NullSingleton)}
   331  		out = b.factory.ConstructCase(input, whens, input)
   332  
   333  	case *tree.OrExpr:
   334  		left := b.buildScalar(t.TypedLeft(), inScope, nil, nil, colRefs)
   335  		right := b.buildScalar(t.TypedRight(), inScope, nil, nil, colRefs)
   336  		out = b.factory.ConstructOr(left, right)
   337  
   338  	case *tree.ParenExpr:
   339  		// Treat ParenExpr as if it wasn't present.
   340  		return b.buildScalar(t.TypedInnerExpr(), inScope, outScope, outCol, colRefs)
   341  
   342  	case *tree.Placeholder:
   343  		if !b.KeepPlaceholders && b.evalCtx.HasPlaceholders() {
   344  			b.HadPlaceholders = true
   345  			// Replace placeholders with their value.
   346  			d, err := t.Eval(b.evalCtx)
   347  			if err != nil {
   348  				panic(err)
   349  			}
   350  			out = b.factory.ConstructConstVal(d, t.ResolvedType())
   351  		} else {
   352  			out = b.factory.ConstructPlaceholder(t)
   353  		}
   354  
   355  	case *tree.RangeCond:
   356  		inputFrom := b.buildScalar(t.TypedLeftFrom(), inScope, nil, nil, colRefs)
   357  		from := b.buildScalar(t.TypedFrom(), inScope, nil, nil, colRefs)
   358  		inputTo := b.buildScalar(t.TypedLeftTo(), inScope, nil, nil, colRefs)
   359  		to := b.buildScalar(t.TypedTo(), inScope, nil, nil, colRefs)
   360  		out = b.buildRangeCond(t.Not, t.Symmetric, inputFrom, from, inputTo, to)
   361  
   362  	case *sqlFnInfo:
   363  		out = b.buildSQLFn(t, inScope, outScope, outCol, colRefs)
   364  
   365  	case *srf:
   366  		if len(t.cols) == 1 {
   367  			if inGroupingContext {
   368  				// Non-grouping column was referenced. Note that a column that is part
   369  				// of a larger grouping expression would have been detected by the
   370  				// groupStrs checking code above.
   371  				panic(newGroupingError(&t.cols[0].name))
   372  			}
   373  			return b.finishBuildScalarRef(&t.cols[0], inScope, outScope, outCol, colRefs)
   374  		}
   375  		els := make(memo.ScalarListExpr, len(t.cols))
   376  		for i := range t.cols {
   377  			els[i] = b.buildScalar(&t.cols[i], inScope, nil, nil, colRefs)
   378  		}
   379  		out = b.factory.ConstructTuple(els, t.ResolvedType())
   380  
   381  	case *subquery:
   382  		out, _ = b.buildSingleRowSubquery(t, inScope)
   383  		// Perform correctness checks on the outer cols, update colRefs and
   384  		// b.subquery.outerCols.
   385  		b.checkSubqueryOuterCols(t.outerCols, inGroupingContext, inScope, colRefs)
   386  
   387  	case *tree.Tuple:
   388  		els := make(memo.ScalarListExpr, len(t.Exprs))
   389  		for i := range t.Exprs {
   390  			els[i] = b.buildScalar(t.Exprs[i].(tree.TypedExpr), inScope, nil, nil, colRefs)
   391  		}
   392  		out = b.factory.ConstructTuple(els, t.ResolvedType())
   393  
   394  	case *tree.UnaryExpr:
   395  		out = b.buildScalar(t.TypedInnerExpr(), inScope, nil, nil, colRefs)
   396  		out = b.constructUnary(t.Operator, out, t.ResolvedType())
   397  
   398  	case *tree.IsOfTypeExpr:
   399  		// IsOfTypeExpr is a little strange because its value can be determined
   400  		// statically just from the type of the expression.
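		// For example, 1::INT IS OF (INT) folds to True and 1::INT IS OF (STRING)
		// folds to False, without evaluating the operand at runtime.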
   401  		actualType := t.Expr.(tree.TypedExpr).ResolvedType()
   402  
   403  		found := false
   404  		for _, typ := range t.ResolvedTypes() {
   405  			if actualType.Equivalent(typ) {
   406  				found = true
   407  				break
   408  			}
   409  		}
   410  
   411  		if found != t.Not {
   412  			out = b.factory.ConstructTrue()
   413  		} else {
   414  			out = b.factory.ConstructFalse()
   415  		}
   416  
   417  	// NB: this is the exception to the sorting of the case statements. The
   418  	// tree.Datum case needs to occur after *tree.Placeholder which implements
   419  	// Datum.
   420  	case tree.Datum:
   421  		out = b.factory.ConstructConstVal(t, t.ResolvedType())
   422  
   423  	default:
   424  		panic(unimplemented.Newf(fmt.Sprintf("optbuilder.%T", scalar), "not yet implemented: scalar expression: %T", scalar))
   425  	}
   426  
   427  	return b.finishBuildScalar(scalar, out, inScope, outScope, outCol)
   428  }
   429  
   430  func (b *Builder) hasSubOperator(t *tree.ComparisonExpr) bool {
   431  	return t.Operator == tree.Any || t.Operator == tree.All || t.Operator == tree.Some
   432  }
   433  
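// buildAnyScalar builds a scalar ANY/SOME/ALL comparison where the right-hand
// side is an explicit tuple or array (multi-row subqueries are handled
// elsewhere). ALL is expressed in terms of ANY by negating both the
// sub-operator and the result; for example, x = ALL (y) is built as
// NOT (x <> ANY (y)).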
   434  func (b *Builder) buildAnyScalar(
   435  	t *tree.ComparisonExpr, inScope *scope, colRefs *opt.ColSet,
   436  ) opt.ScalarExpr {
   437  	left := b.buildScalar(t.TypedLeft(), inScope, nil, nil, colRefs)
   438  	right := b.buildScalar(t.TypedRight(), inScope, nil, nil, colRefs)
   439  
   440  	subop := opt.ComparisonOpMap[t.SubOperator]
   441  
   442  	if t.Operator == tree.All {
   443  		subop = opt.NegateOpMap[subop]
   444  	}
   445  
   446  	out := b.factory.ConstructAnyScalar(left, right, subop)
   447  	if t.Operator == tree.All {
   448  		out = b.factory.ConstructNot(out)
   449  	}
   450  	return out
   451  }
   452  
   453  // buildFunction builds a set of memo groups that represent a function
   454  // expression.
   455  //
   456  // f        The given function expression.
   457  // outCol   The output column of the function being built.
   458  // colRefs  The set of columns referenced so far by the scalar expression
   459  //          being built. If not nil, it is updated with any columns seen in
   460  //          finishBuildScalarRef.
   461  //
   462  // See Builder.buildStmt for a description of the remaining input and
   463  // return values.
   464  func (b *Builder) buildFunction(
   465  	f *tree.FuncExpr, inScope, outScope *scope, outCol *scopeColumn, colRefs *opt.ColSet,
   466  ) (out opt.ScalarExpr) {
   467  	if f.WindowDef != nil {
   468  		if inScope.inAgg {
   469  			panic(sqlbase.NewWindowInAggError())
   470  		}
   471  	}
   472  
   473  	def, err := f.Func.Resolve(b.semaCtx.SearchPath)
   474  	if err != nil {
   475  		panic(err)
   476  	}
   477  
   478  	if isAggregate(def) {
   479  		panic(errors.AssertionFailedf("aggregate function should have been replaced"))
   480  	}
   481  
   482  	if isWindow(def) {
   483  		panic(errors.AssertionFailedf("window function should have been replaced"))
   484  	}
   485  
   486  	args := make(memo.ScalarListExpr, len(f.Exprs))
   487  	for i, pexpr := range f.Exprs {
   488  		args[i] = b.buildScalar(pexpr.(tree.TypedExpr), inScope, nil, nil, colRefs)
   489  	}
   490  
   491  	// Construct a private FuncOpDef that refers to a resolved function overload.
   492  	out = b.factory.ConstructFunction(args, &memo.FunctionPrivate{
   493  		Name:       def.Name,
   494  		Typ:        f.ResolvedType(),
   495  		Properties: &def.FunctionProperties,
   496  		Overload:   f.ResolvedOverload(),
   497  	})
   498  
   499  	if isGenerator(def) {
   500  		return b.finishBuildGeneratorFunction(f, out, inScope, outScope, outCol)
   501  	}
   502  
   503  	return b.finishBuildScalar(f, out, inScope, outScope, outCol)
   504  }
   505  
   506  // buildRangeCond builds a RANGE clause as a simpler expression. Examples:
   507  // x BETWEEN a AND b                ->  x >= a AND x <= b
   508  // x NOT BETWEEN a AND b            ->  NOT (x >= a AND x <= b)
   509  // x BETWEEN SYMMETRIC a AND b      ->  (x >= a AND x <= b) OR (x >= b AND x <= a)
   510  // x NOT BETWEEN SYMMETRIC a AND b  ->  NOT ((x >= a AND x <= b) OR (x >= b AND x <= a))
   511  //
   512  // Note that x can be typed differently in the expressions (x >= a) and (x <= b)
   513  // because a and b can have different types; the function takes both "variants"
   514  // of x.
   515  //
   516  // Note that these expressions are subject to normalization rules (which can
   517  // push down the negation).
   518  // TODO(radu): this doesn't work when the expressions have side-effects.
   519  func (b *Builder) buildRangeCond(
   520  	not bool, symmetric bool, inputFrom, from, inputTo, to opt.ScalarExpr,
   521  ) opt.ScalarExpr {
   522  	// Build "input >= from AND input <= to".
   523  	out := b.factory.ConstructAnd(
   524  		b.factory.ConstructGe(inputFrom, from),
   525  		b.factory.ConstructLe(inputTo, to),
   526  	)
   527  
   528  	if symmetric {
   529  		// Build "(input >= from AND input <= to) OR (input >= to AND input <= from)".
   530  		lhs := out
   531  		rhs := b.factory.ConstructAnd(
   532  			b.factory.ConstructGe(inputTo, to),
   533  			b.factory.ConstructLe(inputFrom, from),
   534  		)
   535  		out = b.factory.ConstructOr(lhs, rhs)
   536  	}
   537  
   538  	if not {
   539  		out = b.factory.ConstructNot(out)
   540  	}
   541  	return out
   542  }
   543  
   544  // checkSubqueryOuterCols uses the subquery outer columns to update the given
   545  // set of column references and the set of outer columns for any enclosing
   546  // subquery. It also performs the following checks:
   547  //   1. If aggregates are not allowed in the current context (e.g., if we
   548  //      are building the WHERE clause), it checks that the subquery does not
   549  //      reference any aggregates from this scope.
   550  //   2. If this is a grouping context, it checks that any outer columns from
   551  //      the given subquery that reference inScope are either aggregate or
   552  //      grouping columns in inScope.
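//
// For example (illustrative), check 2 rejects
//
//   SELECT (SELECT u.x FROM u WHERE u.y = t.v) FROM t GROUP BY t.k
//
// because the subquery references t.v, which is neither an aggregate nor a
// grouping column in the outer query.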
   553  func (b *Builder) checkSubqueryOuterCols(
   554  	subqueryOuterCols opt.ColSet, inGroupingContext bool, inScope *scope, colRefs *opt.ColSet,
   555  ) {
   556  	if subqueryOuterCols.Empty() {
   557  		return
   558  	}
   559  
   560  	// Register the use of correlation to telemetry.
   561  	// Note: we don't blindly increment the counter every time this
   562  	// method is called, to avoid double counting the same query.
   563  	if !b.isCorrelated {
   564  		b.isCorrelated = true
   565  		telemetry.Inc(sqltelemetry.CorrelatedSubqueryUseCounter)
   566  	}
   567  
   568  	var inScopeCols opt.ColSet
   569  	if b.subquery != nil || inGroupingContext {
   570  		// Only calculate the set of inScope columns if it will be used below.
   571  		inScopeCols = inScope.colSet()
   572  	}
   573  
   574  	if colRefs != nil {
   575  		colRefs.UnionWith(subqueryOuterCols)
   576  	}
   577  	if b.subquery != nil {
   578  		b.subquery.outerCols.UnionWith(subqueryOuterCols.Difference(inScopeCols))
   579  	}
   580  
   581  	// Check 1 (see function comment).
   582  	if b.semaCtx.Properties.IsSet(tree.RejectAggregates) && inScope.groupby != nil {
   583  		aggCols := inScope.groupby.aggregateResultCols()
   584  		for i := range aggCols {
   585  			if subqueryOuterCols.Contains(aggCols[i].id) {
   586  				panic(tree.NewInvalidFunctionUsageError(tree.AggregateClass, inScope.context.String()))
   587  			}
   588  		}
   589  	}
   590  
   591  	// Check 2 (see function comment).
   592  	if inGroupingContext {
   593  		subqueryOuterCols.IntersectionWith(inScopeCols)
   594  		if !subqueryOuterCols.Empty() &&
   595  			!subqueryOuterCols.SubsetOf(inScope.groupby.aggOutScope.colSet()) {
   596  			subqueryOuterCols.DifferenceWith(inScope.groupby.aggOutScope.colSet())
   597  			colID, _ := subqueryOuterCols.Next(0)
   598  			col := inScope.getColumn(colID)
   599  			panic(pgerror.Newf(
   600  				pgcode.Grouping,
   601  				"subquery uses ungrouped column \"%s\" from outer query",
   602  				tree.ErrString(&col.name)))
   603  		}
   604  	}
   605  }
   606  
   607  func (b *Builder) constructComparison(
   608  	cmp tree.ComparisonOperator, left, right opt.ScalarExpr,
   609  ) opt.ScalarExpr {
   610  	switch cmp {
   611  	case tree.EQ:
   612  		return b.factory.ConstructEq(left, right)
   613  	case tree.LT:
   614  		return b.factory.ConstructLt(left, right)
   615  	case tree.GT:
   616  		return b.factory.ConstructGt(left, right)
   617  	case tree.LE:
   618  		return b.factory.ConstructLe(left, right)
   619  	case tree.GE:
   620  		return b.factory.ConstructGe(left, right)
   621  	case tree.NE:
   622  		return b.factory.ConstructNe(left, right)
   623  	case tree.In:
   624  		return b.factory.ConstructIn(left, right)
   625  	case tree.NotIn:
   626  		return b.factory.ConstructNotIn(left, right)
   627  	case tree.Like:
   628  		return b.factory.ConstructLike(left, right)
   629  	case tree.NotLike:
   630  		return b.factory.ConstructNotLike(left, right)
   631  	case tree.ILike:
   632  		return b.factory.ConstructILike(left, right)
   633  	case tree.NotILike:
   634  		return b.factory.ConstructNotILike(left, right)
   635  	case tree.SimilarTo:
   636  		return b.factory.ConstructSimilarTo(left, right)
   637  	case tree.NotSimilarTo:
   638  		return b.factory.ConstructNotSimilarTo(left, right)
   639  	case tree.RegMatch:
   640  		return b.factory.ConstructRegMatch(left, right)
   641  	case tree.NotRegMatch:
   642  		return b.factory.ConstructNotRegMatch(left, right)
   643  	case tree.RegIMatch:
   644  		return b.factory.ConstructRegIMatch(left, right)
   645  	case tree.NotRegIMatch:
   646  		return b.factory.ConstructNotRegIMatch(left, right)
   647  	case tree.IsDistinctFrom:
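		// The memo Is/IsNot operators carry the NULL-aware semantics of
		// IS NOT DISTINCT FROM / IS DISTINCT FROM, hence the mapping below.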
   648  		return b.factory.ConstructIsNot(left, right)
   649  	case tree.IsNotDistinctFrom:
   650  		return b.factory.ConstructIs(left, right)
   651  	case tree.Contains:
   652  		return b.factory.ConstructContains(left, right)
   653  	case tree.ContainedBy:
   654  		// This is just syntactic sugar that reverses the operands.
   655  		return b.factory.ConstructContains(right, left)
   656  	case tree.JSONExists:
   657  		return b.factory.ConstructJsonExists(left, right)
   658  	case tree.JSONAllExists:
   659  		return b.factory.ConstructJsonAllExists(left, right)
   660  	case tree.JSONSomeExists:
   661  		return b.factory.ConstructJsonSomeExists(left, right)
   662  	case tree.Overlaps:
   663  		return b.factory.ConstructOverlaps(left, right)
   664  	}
   665  	panic(errors.AssertionFailedf("unhandled comparison operator: %s", log.Safe(cmp)))
   666  }
   667  
   668  func (b *Builder) constructBinary(
   669  	bin tree.BinaryOperator, left, right opt.ScalarExpr, typ *types.T,
   670  ) opt.ScalarExpr {
   671  	switch bin {
   672  	case tree.Bitand:
   673  		return b.factory.ConstructBitand(left, right)
   674  	case tree.Bitor:
   675  		return b.factory.ConstructBitor(left, right)
   676  	case tree.Bitxor:
   677  		return b.factory.ConstructBitxor(left, right)
   678  	case tree.Plus:
   679  		return b.factory.ConstructPlus(left, right)
   680  	case tree.Minus:
   681  		return b.factory.ConstructMinus(left, right)
   682  	case tree.Mult:
   683  		return b.factory.ConstructMult(left, right)
   684  	case tree.Div:
   685  		return b.factory.ConstructDiv(left, right)
   686  	case tree.FloorDiv:
   687  		return b.factory.ConstructFloorDiv(left, right)
   688  	case tree.Mod:
   689  		return b.factory.ConstructMod(left, right)
   690  	case tree.Pow:
   691  		return b.factory.ConstructPow(left, right)
   692  	case tree.Concat:
   693  		return b.factory.ConstructConcat(left, right)
   694  	case tree.LShift:
   695  		return b.factory.ConstructLShift(left, right)
   696  	case tree.RShift:
   697  		return b.factory.ConstructRShift(left, right)
   698  	case tree.JSONFetchText:
   699  		return b.factory.ConstructFetchText(left, right)
   700  	case tree.JSONFetchVal:
   701  		return b.factory.ConstructFetchVal(left, right)
   702  	case tree.JSONFetchValPath:
   703  		return b.factory.ConstructFetchValPath(left, right)
   704  	case tree.JSONFetchTextPath:
   705  		return b.factory.ConstructFetchTextPath(left, right)
   706  	}
   707  	panic(errors.AssertionFailedf("unhandled binary operator: %s", log.Safe(bin)))
   708  }
   709  
   710  func (b *Builder) constructUnary(
   711  	un tree.UnaryOperator, input opt.ScalarExpr, typ *types.T,
   712  ) opt.ScalarExpr {
   713  	switch un {
   714  	case tree.UnaryMinus:
   715  		return b.factory.ConstructUnaryMinus(input)
   716  	case tree.UnaryComplement:
   717  		return b.factory.ConstructUnaryComplement(input)
   718  	case tree.UnarySqrt:
   719  		return b.factory.ConstructUnarySqrt(input)
   720  	case tree.UnaryCbrt:
   721  		return b.factory.ConstructUnaryCbrt(input)
   722  	}
   723  	panic(errors.AssertionFailedf("unhandled unary operator: %s", log.Safe(un)))
   724  }
   725  
   726  // ScalarBuilder is a specialized variant of Builder that can be used to create
   727  // a scalar from a TypedExpr. This is used to build scalar expressions for
   728  // testing. It is also used temporarily to interface with the old planning code.
   729  //
   730  // TypedExprs can refer to columns in the current scope using IndexedVars (@1,
   731  // @2, etc). When we build a scalar, we have to provide information about these
   732  // columns.
   733  type ScalarBuilder struct {
   734  	Builder
   735  	scope scope
   736  }
   737  
   738  // NewScalar creates a new ScalarBuilder. The columns in the metadata are accessible
   739  // from scalar expressions via IndexedVars.
   740  func NewScalar(
   741  	ctx context.Context, semaCtx *tree.SemaContext, evalCtx *tree.EvalContext, factory *norm.Factory,
   742  ) *ScalarBuilder {
   743  	md := factory.Metadata()
   744  	sb := &ScalarBuilder{
   745  		Builder: Builder{
   746  			factory: factory,
   747  			ctx:     ctx,
   748  			semaCtx: semaCtx,
   749  			evalCtx: evalCtx,
   750  		},
   751  	}
   752  	sb.scope.builder = &sb.Builder
   753  
   754  	// Put all the columns in the current scope.
   755  	sb.scope.cols = make([]scopeColumn, 0, md.NumColumns())
   756  	for colID := opt.ColumnID(1); int(colID) <= md.NumColumns(); colID++ {
   757  		colMeta := md.ColumnMeta(colID)
   758  		sb.scope.cols = append(sb.scope.cols, scopeColumn{
   759  			name: tree.Name(colMeta.Alias),
   760  			typ:  colMeta.Type,
   761  			id:   colID,
   762  		})
   763  	}
   764  
   765  	return sb
   766  }
   767  
   768  // Build builds a memo structure from a TypedExpr: the root group represents a
   769  // scalar expression equivalent to expr.
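//
// A typical usage is roughly the following sketch (setup of the factory and
// contexts is assumed and not shown):
//
//   sb := NewScalar(ctx, semaCtx, evalCtx, factory)
//   if err := sb.Build(expr); err != nil {
//       return err
//   }
//   root := factory.Memo().RootExpr() // root was set via SetScalarRoot below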
   770  func (sb *ScalarBuilder) Build(expr tree.Expr) (err error) {
   771  	defer func() {
   772  		if r := recover(); r != nil {
   773  			// This code allows us to propagate errors without adding lots of checks
   774  			// for `if err != nil` throughout the construction code. This is only
   775  			// possible because the code does not update shared state and does not
   776  			// manipulate locks.
   777  			if ok, e := errorutil.ShouldCatch(r); ok {
   778  				err = e
   779  			} else {
   780  				panic(r)
   781  			}
   782  		}
   783  	}()
   784  
   785  	typedExpr := sb.scope.resolveType(expr, types.Any)
   786  	scalar := sb.buildScalar(typedExpr, &sb.scope, nil, nil, nil)
   787  	sb.factory.Memo().SetScalarRoot(scalar)
   788  	return nil
   789  }