github.com/evanw/esbuild@v0.21.4/internal/bundler/bundler.go

     1  package bundler
     2  
     3  // The bundler is the core of the "build" and "transform" API calls. Each
     4  // operation has two phases. The first phase scans the module graph, and is
     5  // represented by the "ScanBundle" function. The second phase generates the
     6  // output files from the module graph, and is implemented by the "Compile"
     7  // function.
     8  
     9  import (
    10  	"bytes"
    11  	"encoding/base32"
    12  	"encoding/base64"
    13  	"fmt"
    14  	"math/rand"
    15  	"net/http"
    16  	"sort"
    17  	"strings"
    18  	"sync"
    19  	"syscall"
    20  	"time"
    21  	"unicode"
    22  	"unicode/utf8"
    23  
    24  	"github.com/evanw/esbuild/internal/ast"
    25  	"github.com/evanw/esbuild/internal/cache"
    26  	"github.com/evanw/esbuild/internal/compat"
    27  	"github.com/evanw/esbuild/internal/config"
    28  	"github.com/evanw/esbuild/internal/css_parser"
    29  	"github.com/evanw/esbuild/internal/fs"
    30  	"github.com/evanw/esbuild/internal/graph"
    31  	"github.com/evanw/esbuild/internal/helpers"
    32  	"github.com/evanw/esbuild/internal/js_ast"
    33  	"github.com/evanw/esbuild/internal/js_lexer"
    34  	"github.com/evanw/esbuild/internal/js_parser"
    35  	"github.com/evanw/esbuild/internal/logger"
    36  	"github.com/evanw/esbuild/internal/resolver"
    37  	"github.com/evanw/esbuild/internal/runtime"
    38  	"github.com/evanw/esbuild/internal/sourcemap"
    39  	"github.com/evanw/esbuild/internal/xxhash"
    40  )
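// Editorial note (not part of the original file): a minimal sketch of how the
// two phases described in the header comment fit together. "ScanBundle" and its
// signature appear later in this file; "Compile" is named by the header comment
// but its signature is not shown here, so it is only referenced, not called.
//
//	bundle := ScanBundle(call, log, fs, caches, entryPoints, options, timer) // phase 1: scan the module graph
//	// phase 2: bundle.Compile(...) turns the scanned graph into output files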
    41  
    42  type scannerFile struct {
    43  	// If "AbsMetadataFile" is present, this will be filled out with information
    44  	// about this file in JSON format. This is a partial JSON file that will be
    45  	// fully assembled later.
    46  	jsonMetadataChunk string
    47  
    48  	pluginData interface{}
    49  	inputFile  graph.InputFile
    50  }
    51  
    52  // This is data related to source maps. It's computed in parallel with linking
    53  // and must be ready by the time printing happens. This is beneficial because
    54  // it is somewhat expensive to produce.
    55  type DataForSourceMap struct {
    56  	// This data is for the printer. It maps from byte offsets in the file (which
    57  	// are stored at every AST node) to UTF-16 column offsets (required by source
    58  	// maps).
    59  	LineOffsetTables []sourcemap.LineOffsetTable
    60  
    61  	// This contains the quoted contents of the original source file. It's what
    62  	// needs to be embedded in the "sourcesContent" array in the final source
    63  	// map. Quoting is precomputed because it's somewhat expensive.
    64  	QuotedContents [][]byte
    65  }
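// Editorial note (not part of the original file): a sketch of why UTF-16
// columns must be precomputed. The AST stores byte offsets into UTF-8 source
// text, but the source map format counts columns in UTF-16 code units, so the
// two column spaces diverge on any non-ASCII character:
//
//	s := "€x"                                    // "€" is 3 bytes in UTF-8
//	byteOffset := strings.IndexByte(s, 'x')      // 3 (UTF-8 bytes)
//	utf16Col := len(utf16.Encode([]rune(s[:3]))) // 1 ("€" is one UTF-16 unit)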
    66  
    67  type Bundle struct {
    68  	// The unique key prefix is a random string that is unique to every bundling
    69  	// operation. It is used as a prefix for the unique keys assigned to every
    70  	// chunk during linking. These unique keys are used to identify each chunk
    71  	// before the final output paths have been computed.
    72  	uniqueKeyPrefix string
    73  
    74  	fs          fs.FS
    75  	res         *resolver.Resolver
    76  	files       []scannerFile
    77  	entryPoints []graph.EntryPoint
    78  	options     config.Options
    79  }
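// Editorial note (not part of the original file): a unique key is this random
// prefix plus a kind marker and a zero-padded source index (see the "%sA%08d"
// format used by the "file" and "copy" loaders in parseFile below). The linker
// later rewrites these placeholder keys into final output paths:
//
//	key := fmt.Sprintf("%sA%08d", uniqueKeyPrefix, sourceIndex) // e.g. "<16-char prefix>A00000003"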
    80  
    81  type parseArgs struct {
    82  	fs              fs.FS
    83  	log             logger.Log
    84  	res             *resolver.Resolver
    85  	caches          *cache.CacheSet
    86  	prettyPath      string
    87  	importSource    *logger.Source
    88  	importWith      *ast.ImportAssertOrWith
    89  	sideEffects     graph.SideEffects
    90  	pluginData      interface{}
    91  	results         chan parseResult
    92  	inject          chan config.InjectedFile
    93  	uniqueKeyPrefix string
    94  	keyPath         logger.Path
    95  	options         config.Options
    96  	importPathRange logger.Range
    97  	sourceIndex     uint32
    98  	skipResolve     bool
    99  }
   100  
   101  type parseResult struct {
   102  	resolveResults     []*resolver.ResolveResult
   103  	globResolveResults map[uint32]globResolveResult
   104  	file               scannerFile
   105  	tlaCheck           tlaCheck
   106  	ok                 bool
   107  }
   108  
   109  type globResolveResult struct {
   110  	resolveResults map[string]resolver.ResolveResult
   111  	absPath        string
   112  	prettyPath     string
   113  	exportAlias    string
   114  }
   115  
   116  type tlaCheck struct {
   117  	parent            ast.Index32
   118  	depth             uint32
   119  	importRecordIndex uint32
   120  }
   121  
   122  func parseFile(args parseArgs) {
   123  	source := logger.Source{
   124  		Index:          args.sourceIndex,
   125  		KeyPath:        args.keyPath,
   126  		PrettyPath:     args.prettyPath,
   127  		IdentifierName: js_ast.GenerateNonUniqueNameFromPath(args.keyPath.Text),
   128  	}
   129  
   130  	var loader config.Loader
   131  	var absResolveDir string
   132  	var pluginName string
   133  	var pluginData interface{}
   134  
   135  	if stdin := args.options.Stdin; stdin != nil {
   136  		// Special-case stdin
   137  		source.Contents = stdin.Contents
   138  		loader = stdin.Loader
   139  		if loader == config.LoaderNone {
   140  			loader = config.LoaderJS
   141  		}
   142  		absResolveDir = args.options.Stdin.AbsResolveDir
   143  	} else {
   144  		result, ok := runOnLoadPlugins(
   145  			args.options.Plugins,
   146  			args.fs,
   147  			&args.caches.FSCache,
   148  			args.log,
   149  			&source,
   150  			args.importSource,
   151  			args.importPathRange,
   152  			args.importWith,
   153  			args.pluginData,
   154  			args.options.WatchMode,
   155  		)
   156  		if !ok {
   157  			if args.inject != nil {
   158  				args.inject <- config.InjectedFile{
   159  					Source: source,
   160  				}
   161  			}
   162  			args.results <- parseResult{}
   163  			return
   164  		}
   165  		loader = result.loader
   166  		absResolveDir = result.absResolveDir
   167  		pluginName = result.pluginName
   168  		pluginData = result.pluginData
   169  	}
   170  
   171  	_, base, ext := logger.PlatformIndependentPathDirBaseExt(source.KeyPath.Text)
   172  
   173  	// The special "default" loader determines the loader from the file path
   174  	if loader == config.LoaderDefault {
   175  		loader = loaderFromFileExtension(args.options.ExtensionToLoader, base+ext)
   176  	}
   177  
   178  	if loader == config.LoaderEmpty {
   179  		source.Contents = ""
   180  	}
   181  
   182  	result := parseResult{
   183  		file: scannerFile{
   184  			inputFile: graph.InputFile{
   185  				Source:      source,
   186  				Loader:      loader,
   187  				SideEffects: args.sideEffects,
   188  			},
   189  			pluginData: pluginData,
   190  		},
   191  	}
   192  
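	// Recover from parser panics: report the panic as a build error with a
	// pretty-printed stack trace, and still send a result on the channel so
	// the scanner is not left waiting for this file.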
   193  	defer func() {
   194  		r := recover()
   195  		if r != nil {
   196  			args.log.AddErrorWithNotes(nil, logger.Range{},
   197  				fmt.Sprintf("panic: %v (while parsing %q)", r, source.PrettyPath),
   198  				[]logger.MsgData{{Text: helpers.PrettyPrintedStack()}})
   199  			args.results <- result
   200  		}
   201  	}()
   202  
   203  	switch loader {
   204  	case config.LoaderJS, config.LoaderEmpty:
   205  		ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options))
   206  		if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part
   207  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST
   208  		}
   209  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   210  		result.ok = ok
   211  
   212  	case config.LoaderJSX:
   213  		args.options.JSX.Parse = true
   214  		ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options))
   215  		if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part
   216  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST
   217  		}
   218  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   219  		result.ok = ok
   220  
   221  	case config.LoaderTS, config.LoaderTSNoAmbiguousLessThan:
   222  		args.options.TS.Parse = true
   223  		args.options.TS.NoAmbiguousLessThan = loader == config.LoaderTSNoAmbiguousLessThan
   224  		ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options))
   225  		if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part
   226  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST
   227  		}
   228  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   229  		result.ok = ok
   230  
   231  	case config.LoaderTSX:
   232  		args.options.TS.Parse = true
   233  		args.options.JSX.Parse = true
   234  		ast, ok := args.caches.JSCache.Parse(args.log, source, js_parser.OptionsFromConfig(&args.options))
   235  		if len(ast.Parts) <= 1 { // Ignore the implicitly-generated namespace export part
   236  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_EmptyAST
   237  		}
   238  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   239  		result.ok = ok
   240  
   241  	case config.LoaderCSS, config.LoaderGlobalCSS, config.LoaderLocalCSS:
   242  		ast := args.caches.CSSCache.Parse(args.log, source, css_parser.OptionsFromConfig(loader, &args.options))
   243  		result.file.inputFile.Repr = &graph.CSSRepr{AST: ast}
   244  		result.ok = true
   245  
   246  	case config.LoaderJSON, config.LoaderWithTypeJSON:
   247  		expr, ok := args.caches.JSONCache.Parse(args.log, source, js_parser.JSONOptions{
   248  			UnsupportedJSFeatures: args.options.UnsupportedJSFeatures,
   249  		})
   250  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
   251  		if loader == config.LoaderWithTypeJSON {
   252  			// The exports kind defaults to "none", in which case the linker picks
   253  			// either ESM or CommonJS depending on the situation. Dynamic imports
   254  			// cause the linker to pick CommonJS, which uses "require()" and then
   255  			// converts the return value to ESM, which adds extra properties that
   256  			// aren't supposed to be there when "{ with: { type: 'json' } }" is
   257  			// present. So if there's an import attribute, we force the type to
   258  			// be ESM to avoid this.
   259  			ast.ExportsKind = js_ast.ExportsESM
   260  		}
   261  		if pluginName != "" {
   262  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   263  		} else {
   264  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   265  		}
   266  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   267  		result.ok = ok
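		// Editorial example (not part of the original file) of the import
		// attribute syntax the branch above handles:
		//
		//	import data from "./example.json" with { type: "json" }
		//
		// Forcing ExportsESM keeps the linker from routing this import through
		// "require()" interop, which would add properties that import
		// attributes forbid on the imported JSON object.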
   268  
   269  	case config.LoaderText:
   270  		encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents))
   271  		expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(source.Contents)}}
   272  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
   273  		ast.URLForCSS = "data:text/plain;base64," + encoded
   274  		if pluginName != "" {
   275  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   276  		} else {
   277  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   278  		}
   279  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   280  		result.ok = true
   281  
   282  	case config.LoaderBase64:
   283  		mimeType := guessMimeType(ext, source.Contents)
   284  		encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents))
   285  		expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(encoded)}}
   286  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
   287  		ast.URLForCSS = "data:" + mimeType + ";base64," + encoded
   288  		if pluginName != "" {
   289  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   290  		} else {
   291  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   292  		}
   293  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   294  		result.ok = true
   295  
   296  	case config.LoaderBinary:
   297  		encoded := base64.StdEncoding.EncodeToString([]byte(source.Contents))
   298  		expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(encoded)}}
   299  		helper := "__toBinary"
   300  		if args.options.Platform == config.PlatformNode {
   301  			helper = "__toBinaryNode"
   302  		}
   303  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, helper)
   304  		ast.URLForCSS = "data:application/octet-stream;base64," + encoded
   305  		if pluginName != "" {
   306  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   307  		} else {
   308  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   309  		}
   310  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   311  		result.ok = true
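		// Editorial note (not part of the original file): at run-time the
		// "__toBinary"/"__toBinaryNode" helpers are assumed to decode the
		// embedded base64 string back into a Uint8Array (using Buffer on
		// node), so importing a ".bin" file yields bytes rather than text.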
   312  
   313  	case config.LoaderDataURL:
   314  		mimeType := guessMimeType(ext, source.Contents)
   315  		url := helpers.EncodeStringAsShortestDataURL(mimeType, source.Contents)
   316  		expr := js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(url)}}
   317  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
   318  		ast.URLForCSS = url
   319  		if pluginName != "" {
   320  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   321  		} else {
   322  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   323  		}
   324  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   325  		result.ok = true
   326  
   327  	case config.LoaderFile:
   328  		uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex)
   329  		uniqueKeyPath := uniqueKey + source.KeyPath.IgnoredSuffix
   330  		expr := js_ast.Expr{Data: &js_ast.EString{
   331  			Value:             helpers.StringToUTF16(uniqueKeyPath),
   332  			ContainsUniqueKey: true,
   333  		}}
   334  		ast := js_parser.LazyExportAST(args.log, source, js_parser.OptionsFromConfig(&args.options), expr, "")
   335  		ast.URLForCSS = uniqueKeyPath
   336  		if pluginName != "" {
   337  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData_FromPlugin
   338  		} else {
   339  			result.file.inputFile.SideEffects.Kind = graph.NoSideEffects_PureData
   340  		}
   341  		result.file.inputFile.Repr = &graph.JSRepr{AST: ast}
   342  		result.ok = true
   343  
   344  		// Mark that this file is from the "file" loader
   345  		result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey
   346  
   347  	case config.LoaderCopy:
   348  		uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex)
   349  		uniqueKeyPath := uniqueKey + source.KeyPath.IgnoredSuffix
   350  		result.file.inputFile.Repr = &graph.CopyRepr{
   351  			URLForCode: uniqueKeyPath,
   352  		}
   353  		result.ok = true
   354  
   355  		// Mark that this file is from the "copy" loader
   356  		result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey
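		// Editorial note (not part of the original file): both "file" and
		// "copy" emit the input verbatim as an extra output file. A sketch of
		// the user-visible difference, assuming a ".png" mapped to each loader:
		//
		//	import url from "./photo.png" // loader "file": url is the output path string
		//	import "./photo.png"          // loader "copy": the import path itself is rewritten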
   357  
   358  	default:
   359  		var message string
   360  		if source.KeyPath.Namespace == "file" && ext != "" {
   361  			message = fmt.Sprintf("No loader is configured for %q files: %s", ext, source.PrettyPath)
   362  		} else {
   363  			message = fmt.Sprintf("Do not know how to load path: %s", source.PrettyPath)
   364  		}
   365  		tracker := logger.MakeLineColumnTracker(args.importSource)
   366  		args.log.AddError(&tracker, args.importPathRange, message)
   367  	}
   368  
   369  	// Only continue now if parsing was successful
   370  	if result.ok {
   371  		// Run the resolver on the parse thread so it's not run on the main thread.
   372  		// That way the main thread isn't blocked if the resolver takes a while.
   373  		if recordsPtr := result.file.inputFile.Repr.ImportRecords(); args.options.Mode == config.ModeBundle && !args.skipResolve && recordsPtr != nil {
   374  			// Clone the import records because they will be mutated later
   375  			records := append([]ast.ImportRecord{}, *recordsPtr...)
   376  			*recordsPtr = records
   377  			result.resolveResults = make([]*resolver.ResolveResult, len(records))
   378  
   379  			if len(records) > 0 {
   380  				type cacheEntry struct {
   381  					resolveResult *resolver.ResolveResult
   382  					debug         resolver.DebugMeta
   383  					didLogError   bool
   384  				}
   385  
   386  				type cacheKey struct {
   387  					kind  ast.ImportKind
   388  					path  string
   389  					attrs logger.ImportAttributes
   390  				}
   391  				resolverCache := make(map[cacheKey]cacheEntry)
   392  				tracker := logger.MakeLineColumnTracker(&source)
   393  
   394  				for importRecordIndex := range records {
   395  					// Don't try to resolve imports that are already resolved
   396  					record := &records[importRecordIndex]
   397  					if record.SourceIndex.IsValid() {
   398  						continue
   399  					}
   400  
   401  					// Special-case glob pattern imports
   402  					if record.GlobPattern != nil {
   403  						prettyPath := helpers.GlobPatternToString(record.GlobPattern.Parts)
   404  						switch record.GlobPattern.Kind {
   405  						case ast.ImportRequire:
   406  							prettyPath = fmt.Sprintf("require(%q)", prettyPath)
   407  						case ast.ImportDynamic:
   408  							prettyPath = fmt.Sprintf("import(%q)", prettyPath)
   409  						}
   410  						if results, msg := args.res.ResolveGlob(absResolveDir, record.GlobPattern.Parts, record.GlobPattern.Kind, prettyPath); results != nil {
   411  							if msg != nil {
   412  								args.log.AddID(msg.ID, msg.Kind, &tracker, record.Range, msg.Data.Text)
   413  							}
   414  							if result.globResolveResults == nil {
   415  								result.globResolveResults = make(map[uint32]globResolveResult)
   416  							}
   417  							result.globResolveResults[uint32(importRecordIndex)] = globResolveResult{
   418  								resolveResults: results,
   419  								absPath:        args.fs.Join(absResolveDir, "(glob)"),
   420  								prettyPath:     fmt.Sprintf("%s in %s", prettyPath, result.file.inputFile.Source.PrettyPath),
   421  								exportAlias:    record.GlobPattern.ExportAlias,
   422  							}
   423  						} else {
   424  							args.log.AddError(&tracker, record.Range, fmt.Sprintf("Could not resolve %s", prettyPath))
   425  						}
   426  						continue
   427  					}
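					// Editorial example (not part of the original file) of a
					// glob-style dynamic import that reaches the branch above:
					//
					//	const messages = await import(`./locales/${lang}.json`)
					//
					// The parser splits the template literal into GlobPattern
					// parts that ResolveGlob expands against the file system.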
   428  
   429  					// Ignore records that the parser has discarded. This is used to remove
   430  					// type-only imports in TypeScript files.
   431  					if record.Flags.Has(ast.IsUnused) {
   432  						continue
   433  					}
   434  
   435  					// Encode the import attributes
   436  					var attrs logger.ImportAttributes
   437  					if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword {
   438  						data := make(map[string]string, len(record.AssertOrWith.Entries))
   439  						for _, entry := range record.AssertOrWith.Entries {
   440  							data[helpers.UTF16ToString(entry.Key)] = helpers.UTF16ToString(entry.Value)
   441  						}
   442  						attrs = logger.EncodeImportAttributes(data)
   443  					}
   444  
   445  					// Cache the path in case it's imported multiple times in this file
   446  					cacheKey := cacheKey{
   447  						kind:  record.Kind,
   448  						path:  record.Path.Text,
   449  						attrs: attrs,
   450  					}
   451  					entry, ok := resolverCache[cacheKey]
   452  					if ok {
   453  						result.resolveResults[importRecordIndex] = entry.resolveResult
   454  					} else {
   455  						// Run the resolver and log an error if the path couldn't be resolved
   456  						resolveResult, didLogError, debug := RunOnResolvePlugins(
   457  							args.options.Plugins,
   458  							args.res,
   459  							args.log,
   460  							args.fs,
   461  							&args.caches.FSCache,
   462  							&source,
   463  							record.Range,
   464  							source.KeyPath,
   465  							record.Path.Text,
   466  							attrs,
   467  							record.Kind,
   468  							absResolveDir,
   469  							pluginData,
   470  						)
   471  						if resolveResult != nil {
   472  							resolveResult.PathPair.Primary.ImportAttributes = attrs
   473  							if resolveResult.PathPair.HasSecondary() {
   474  								resolveResult.PathPair.Secondary.ImportAttributes = attrs
   475  							}
   476  						}
   477  						entry = cacheEntry{
   478  							resolveResult: resolveResult,
   479  							debug:         debug,
   480  							didLogError:   didLogError,
   481  						}
   482  						resolverCache[cacheKey] = entry
   483  
   484  						// All "require.resolve()" imports should be external because we don't
   485  						// want to waste effort traversing into them
   486  						if record.Kind == ast.ImportRequireResolve {
   487  							if resolveResult != nil && resolveResult.PathPair.IsExternal {
   488  								// Allow path substitution as long as the result is external
   489  								result.resolveResults[importRecordIndex] = resolveResult
   490  							} else if !record.Flags.Has(ast.HandlesImportErrors) {
   491  								args.log.AddID(logger.MsgID_Bundler_RequireResolveNotExternal, logger.Warning, &tracker, record.Range,
   492  									fmt.Sprintf("%q should be marked as external for use with \"require.resolve\"", record.Path.Text))
   493  							}
   494  							continue
   495  						}
   496  					}
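					// Editorial example (not part of the original file) of the
					// "require.resolve" pattern handled above:
					//
					//	const entry = require.resolve("some-pkg")
					//
					// Only the resolved path string is wanted, so bundling the
					// target would be wasted work; it must stay external.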
   497  
   498  					// Check whether we should log an error every time the result is nil,
   499  					// even if it's from the cache. Do this because the error may not
   500  					// have been logged for nil entries if the previous instances had
   501  					// the "HandlesImportErrors" flag.
   502  					if entry.resolveResult == nil {
   503  						// Failed imports inside a try/catch are silently turned into
   504  						// external imports instead of causing errors. This matches a common
   505  						// code pattern for conditionally importing a module with a graceful
   506  						// fallback.
   507  						if !entry.didLogError && !record.Flags.Has(ast.HandlesImportErrors) {
   508  							// Report an error
   509  							text, suggestion, notes := ResolveFailureErrorTextSuggestionNotes(args.res, record.Path.Text, record.Kind,
   510  								pluginName, args.fs, absResolveDir, args.options.Platform, source.PrettyPath, entry.debug.ModifiedImportPath)
   511  							entry.debug.LogErrorMsg(args.log, &source, record.Range, text, suggestion, notes)
   512  
   513  							// Only report this error once per unique import path in the file
   514  							entry.didLogError = true
   515  							resolverCache[cacheKey] = entry
   516  						} else if !entry.didLogError && record.Flags.Has(ast.HandlesImportErrors) {
   517  							// Report a debug message about why there was no error
   518  							args.log.AddIDWithNotes(logger.MsgID_Bundler_IgnoredDynamicImport, logger.Debug, &tracker, record.Range,
   519  								fmt.Sprintf("Importing %q was allowed even though it could not be resolved because dynamic import failures appear to be handled here:",
   520  									record.Path.Text), []logger.MsgData{tracker.MsgData(js_lexer.RangeOfIdentifier(source, record.ErrorHandlerLoc),
   521  									"The handler for dynamic import failures is here:")})
   522  						}
   523  						continue
   524  					}
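					// Editorial example (not part of the original file) of the
					// graceful-fallback pattern that "HandlesImportErrors"
					// detects:
					//
					//	let optional
					//	try {
					//	  optional = require("optional-dep")
					//	} catch {
					//	  optional = null // dependency is absent; carry on
					//	}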
   525  
   526  					result.resolveResults[importRecordIndex] = entry.resolveResult
   527  				}
   528  			}
   529  		}
   530  
   531  		// Attempt to parse the source map if present
   532  		if loader.CanHaveSourceMap() && args.options.SourceMap != config.SourceMapNone {
   533  			var sourceMapComment logger.Span
   534  			switch repr := result.file.inputFile.Repr.(type) {
   535  			case *graph.JSRepr:
   536  				sourceMapComment = repr.AST.SourceMapComment
   537  			case *graph.CSSRepr:
   538  				sourceMapComment = repr.AST.SourceMapComment
   539  			}
   540  
   541  			if sourceMapComment.Text != "" {
   542  				tracker := logger.MakeLineColumnTracker(&source)
   543  
   544  				if path, contents := extractSourceMapFromComment(args.log, args.fs, &args.caches.FSCache,
   545  					&source, &tracker, sourceMapComment, absResolveDir); contents != nil {
   546  					prettyPath := resolver.PrettyPath(args.fs, path)
   547  					log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug, args.log.Overrides)
   548  
   549  					sourceMap := js_parser.ParseSourceMap(log, logger.Source{
   550  						KeyPath:    path,
   551  						PrettyPath: prettyPath,
   552  						Contents:   *contents,
   553  					})
   554  
   555  					if msgs := log.Done(); len(msgs) > 0 {
   556  						var text string
   557  						if path.Namespace == "file" {
   558  							text = fmt.Sprintf("The source map %q was referenced by the file %q here:", prettyPath, args.prettyPath)
   559  						} else {
   560  							text = fmt.Sprintf("This source map came from the file %q here:", args.prettyPath)
   561  						}
   562  						note := tracker.MsgData(sourceMapComment.Range, text)
   563  						for _, msg := range msgs {
   564  							msg.Notes = append(msg.Notes, note)
   565  							args.log.AddMsg(msg)
   566  						}
   567  					}
   568  
   569  					// If "sourcesContent" entries aren't present, try filling them in
   570  					// using the file system. This includes both generating the entire
   571  					// "sourcesContent" array if it's absent as well as filling in
   572  					// individual null entries in the array if the array is present.
   573  					if sourceMap != nil && !args.options.ExcludeSourcesContent {
   574  						// Make sure "sourcesContent" is big enough
   575  						if len(sourceMap.SourcesContent) < len(sourceMap.Sources) {
   576  							slice := make([]sourcemap.SourceContent, len(sourceMap.Sources))
   577  							copy(slice, sourceMap.SourcesContent)
   578  							sourceMap.SourcesContent = slice
   579  						}
   580  
   581  						// Attempt to fill in null entries using the file system
   582  						for i, source := range sourceMap.Sources {
   583  							if sourceMap.SourcesContent[i].Value == nil {
   584  								var absPath string
   585  								if args.fs.IsAbs(source) {
   586  									absPath = source
   587  								} else if path.Namespace == "file" {
   588  									absPath = args.fs.Join(args.fs.Dir(path.Text), source)
   589  								} else {
   590  									continue
   591  								}
   592  								if contents, err, _ := args.caches.FSCache.ReadFile(args.fs, absPath); err == nil {
   593  									sourceMap.SourcesContent[i].Value = helpers.StringToUTF16(contents)
   594  								}
   595  							}
   596  						}
   597  					}
   598  
   599  					result.file.inputFile.InputSourceMap = sourceMap
   600  				}
   601  			}
   602  		}
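		// Editorial note (not part of the original file): the comment parsed
		// above takes one of two forms, a relative path or an inline data URL:
		//
		//	//# sourceMappingURL=app.js.map
		//	//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozfQ==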
   603  	}
   604  
   605  	// Note: We must always send on the "inject" channel before we send on the
   606  	// "results" channel to avoid deadlock
   607  	if args.inject != nil {
   608  		var exports []config.InjectableExport
   609  
   610  		if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok {
   611  			aliases := make([]string, 0, len(repr.AST.NamedExports))
   612  			for alias := range repr.AST.NamedExports {
   613  				aliases = append(aliases, alias)
   614  			}
   615  			sort.Strings(aliases) // Sort for determinism
   616  			exports = make([]config.InjectableExport, len(aliases))
   617  			for i, alias := range aliases {
   618  				exports[i] = config.InjectableExport{
   619  					Alias: alias,
   620  					Loc:   repr.AST.NamedExports[alias].AliasLoc,
   621  				}
   622  			}
   623  		}
   624  
   625  		// Once we send on the "inject" channel, the main thread may mutate the
   626  		// "options" object to populate the "InjectedFiles" field. So we must
   627  		// only send on the "inject" channel after we're done using the "options"
   628  		// object so we don't introduce a data race.
   629  		isCopyLoader := loader == config.LoaderCopy
   630  		if isCopyLoader && args.skipResolve {
   631  			// This is not allowed because the import path would have to be rewritten,
   632  			// but import paths are not rewritten when bundling isn't enabled.
   633  			args.log.AddError(nil, logger.Range{},
   634  				fmt.Sprintf("Cannot inject %q with the \"copy\" loader without bundling enabled", source.PrettyPath))
   635  		}
   636  		args.inject <- config.InjectedFile{
   637  			Source:       source,
   638  			Exports:      exports,
   639  			IsCopyLoader: isCopyLoader,
   640  		}
   641  	}
   642  
   643  	args.results <- result
   644  }
   645  
   646  func ResolveFailureErrorTextSuggestionNotes(
   647  	res *resolver.Resolver,
   648  	path string,
   649  	kind ast.ImportKind,
   650  	pluginName string,
   651  	fs fs.FS,
   652  	absResolveDir string,
   653  	platform config.Platform,
   654  	originatingFilePath string,
   655  	modifiedImportPath string,
   656  ) (text string, suggestion string, notes []logger.MsgData) {
   657  	if modifiedImportPath != "" {
   658  		text = fmt.Sprintf("Could not resolve %q (originally %q)", modifiedImportPath, path)
   659  		notes = append(notes, logger.MsgData{Text: fmt.Sprintf(
   660  			"The path %q was remapped to %q using the alias feature, which then couldn't be resolved. "+
   661  				"Keep in mind that import path aliases are resolved in the current working directory.",
   662  			path, modifiedImportPath)})
   663  		path = modifiedImportPath
   664  	} else {
   665  		text = fmt.Sprintf("Could not resolve %q", path)
   666  	}
   667  	hint := ""
   668  
   669  	if resolver.IsPackagePath(path) && !fs.IsAbs(path) {
   670  		hint = fmt.Sprintf("You can mark the path %q as external to exclude it from the bundle, which will remove this error and leave the unresolved path in the bundle.", path)
   671  		if kind == ast.ImportRequire {
   672  			hint += " You can also surround this \"require\" call with a try/catch block to handle this failure at run-time instead of bundle-time."
   673  		} else if kind == ast.ImportDynamic {
   674  			hint += " You can also add \".catch()\" here to handle this failure at run-time instead of bundle-time."
   675  		}
   676  		if pluginName == "" && !fs.IsAbs(path) {
   677  			if query, _ := res.ProbeResolvePackageAsRelative(absResolveDir, path, kind); query != nil {
   678  				hint = fmt.Sprintf("Use the relative path %q to reference the file %q. "+
   679  					"Without the leading \"./\", the path %q is being interpreted as a package path instead.",
   680  					"./"+path, resolver.PrettyPath(fs, query.PathPair.Primary), path)
   681  				suggestion = string(helpers.QuoteForJSON("./"+path, false))
   682  			}
   683  		}
   684  	}
   685  
   686  	if platform != config.PlatformNode {
   687  		pkg := strings.TrimPrefix(path, "node:")
   688  		if resolver.BuiltInNodeModules[pkg] {
   689  			var how string
   690  			switch logger.API {
   691  			case logger.CLIAPI:
   692  				how = "--platform=node"
   693  			case logger.JSAPI:
   694  				how = "platform: 'node'"
   695  			case logger.GoAPI:
   696  				how = "Platform: api.PlatformNode"
   697  			}
   698  			hint = fmt.Sprintf("The package %q wasn't found on the file system but is built into node. "+
   699  				"Are you trying to bundle for node? You can use %q to do that, which will remove this error.", path, how)
   700  		}
   701  	}
   702  
   703  	if absResolveDir == "" && pluginName != "" {
   704  		where := ""
   705  		if originatingFilePath != "" {
   706  			where = fmt.Sprintf(" for the file %q", originatingFilePath)
   707  		}
   708  		hint = fmt.Sprintf("The plugin %q didn't set a resolve directory%s, "+
   709  			"so esbuild did not search for %q on the file system.", pluginName, where, path)
   710  	}
   711  
   712  	if hint != "" {
   713  		if modifiedImportPath != "" {
   714  			// Add a newline if there's already a paragraph of text
   715  			notes = append(notes, logger.MsgData{})
   716  
   717  			// Don't add a suggestion if the path was rewritten using an alias
   718  			suggestion = ""
   719  		}
   720  		notes = append(notes, logger.MsgData{Text: hint})
   721  	}
   722  	return
   723  }
   724  
   725  func isASCIIOnly(text string) bool {
   726  	for _, c := range text {
   727  		if c < 0x20 || c > 0x7E {
   728  			return false
   729  		}
   730  	}
   731  	return true
   732  }
   733  
   734  func guessMimeType(extension string, contents string) string {
   735  	mimeType := helpers.MimeTypeByExtension(extension)
   736  	if mimeType == "" {
   737  		mimeType = http.DetectContentType([]byte(contents))
   738  	}
   739  
   740  	// Turn "text/plain; charset=utf-8" into "text/plain;charset=utf-8"
   741  	return strings.ReplaceAll(mimeType, "; ", ";")
   742  }
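// Editorial sketch (not part of the original file): how the two lookup steps
// combine, with the second result shown after the "; " fixup.
//
//	guessMimeType(".svg", contents)  // from the extension table, if known
//	guessMimeType(".xyz", "<html>")  // sniffed: "text/html;charset=utf-8"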
   743  
   744  func extractSourceMapFromComment(
   745  	log logger.Log,
   746  	fs fs.FS,
   747  	fsCache *cache.FSCache,
   748  	source *logger.Source,
   749  	tracker *logger.LineColumnTracker,
   750  	comment logger.Span,
   751  	absResolveDir string,
   752  ) (logger.Path, *string) {
   753  	// Support data URLs
   754  	if parsed, ok := resolver.ParseDataURL(comment.Text); ok {
   755  		if contents, err := parsed.DecodeData(); err == nil {
   756  			return logger.Path{Text: source.PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &contents
   757  		} else {
   758  			log.AddID(logger.MsgID_SourceMap_UnsupportedSourceMapComment, logger.Warning, tracker, comment.Range,
   759  				fmt.Sprintf("Unsupported source map comment: %s", err.Error()))
   760  			return logger.Path{}, nil
   761  		}
   762  	}
   763  
   764  	// Relative path in a file with an absolute path
   765  	if absResolveDir != "" {
   766  		absPath := fs.Join(absResolveDir, comment.Text)
   767  		path := logger.Path{Text: absPath, Namespace: "file"}
   768  		contents, err, originalError := fsCache.ReadFile(fs, absPath)
   769  		if log.Level <= logger.LevelDebug && originalError != nil {
   770  			log.AddID(logger.MsgID_None, logger.Debug, tracker, comment.Range, fmt.Sprintf("Failed to read file %q: %s", resolver.PrettyPath(fs, path), originalError.Error()))
   771  		}
   772  		if err != nil {
   773  			kind := logger.Warning
   774  			if err == syscall.ENOENT {
   775  				// Don't report a warning because this is likely unactionable
   776  				kind = logger.Debug
   777  			}
   778  			log.AddID(logger.MsgID_SourceMap_MissingSourceMap, kind, tracker, comment.Range,
   779  				fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, path), err.Error()))
   780  			return logger.Path{}, nil
   781  		}
   782  		return path, &contents
   783  	}
   784  
   785  	// Anything else is unsupported
   786  	return logger.Path{}, nil
   787  }
   788  
   789  func sanitizeLocation(fs fs.FS, loc *logger.MsgLocation) {
   790  	if loc != nil {
   791  		if loc.Namespace == "" {
   792  			loc.Namespace = "file"
   793  		}
   794  		if loc.File != "" {
   795  			loc.File = resolver.PrettyPath(fs, logger.Path{Text: loc.File, Namespace: loc.Namespace})
   796  		}
   797  	}
   798  }
   799  
   800  func logPluginMessages(
   801  	fs fs.FS,
   802  	log logger.Log,
   803  	name string,
   804  	msgs []logger.Msg,
   805  	thrown error,
   806  	importSource *logger.Source,
   807  	importPathRange logger.Range,
   808  ) bool {
   809  	didLogError := false
   810  	tracker := logger.MakeLineColumnTracker(importSource)
   811  
   812  	// Report errors and warnings generated by the plugin
   813  	for _, msg := range msgs {
   814  		if msg.PluginName == "" {
   815  			msg.PluginName = name
   816  		}
   817  		if msg.Kind == logger.Error {
   818  			didLogError = true
   819  		}
   820  
   821  		// Sanitize the locations
   822  		for _, note := range msg.Notes {
   823  			sanitizeLocation(fs, note.Location)
   824  		}
   825  		if msg.Data.Location == nil {
   826  			msg.Data.Location = tracker.MsgLocationOrNil(importPathRange)
   827  		} else {
   828  			sanitizeLocation(fs, msg.Data.Location)
   829  			if importSource != nil {
   830  				if msg.Data.Location.File == "" {
   831  					msg.Data.Location.File = importSource.PrettyPath
   832  				}
   833  				msg.Notes = append(msg.Notes, tracker.MsgData(importPathRange,
   834  					fmt.Sprintf("The plugin %q was triggered by this import", name)))
   835  			}
   836  		}
   837  
   838  		log.AddMsg(msg)
   839  	}
   840  
   841  	// Report errors thrown by the plugin itself
   842  	if thrown != nil {
   843  		didLogError = true
   844  		text := thrown.Error()
   845  		log.AddMsg(logger.Msg{
   846  			PluginName: name,
   847  			Kind:       logger.Error,
   848  			Data: logger.MsgData{
   849  				Text:       text,
   850  				Location:   tracker.MsgLocationOrNil(importPathRange),
   851  				UserDetail: thrown,
   852  			},
   853  		})
   854  	}
   855  
   856  	return didLogError
   857  }
   858  
   859  func RunOnResolvePlugins(
   860  	plugins []config.Plugin,
   861  	res *resolver.Resolver,
   862  	log logger.Log,
   863  	fs fs.FS,
   864  	fsCache *cache.FSCache,
   865  	importSource *logger.Source,
   866  	importPathRange logger.Range,
   867  	importer logger.Path,
   868  	path string,
   869  	importAttributes logger.ImportAttributes,
   870  	kind ast.ImportKind,
   871  	absResolveDir string,
   872  	pluginData interface{},
   873  ) (*resolver.ResolveResult, bool, resolver.DebugMeta) {
   874  	resolverArgs := config.OnResolveArgs{
   875  		Path:       path,
   876  		ResolveDir: absResolveDir,
   877  		Kind:       kind,
   878  		PluginData: pluginData,
   879  		Importer:   importer,
   880  		With:       importAttributes,
   881  	}
   882  	applyPath := logger.Path{
   883  		Text:      path,
   884  		Namespace: importer.Namespace,
   885  	}
   886  	tracker := logger.MakeLineColumnTracker(importSource)
   887  
   888  	// Apply resolver plugins in order until one succeeds
   889  	for _, plugin := range plugins {
   890  		for _, onResolve := range plugin.OnResolve {
   891  			if !config.PluginAppliesToPath(applyPath, onResolve.Filter, onResolve.Namespace) {
   892  				continue
   893  			}
   894  
   895  			result := onResolve.Callback(resolverArgs)
   896  			pluginName := result.PluginName
   897  			if pluginName == "" {
   898  				pluginName = plugin.Name
   899  			}
   900  			didLogError := logPluginMessages(fs, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange)
   901  
   902  			// Plugins can also provide additional file system paths to watch
   903  			for _, file := range result.AbsWatchFiles {
   904  				fsCache.ReadFile(fs, file)
   905  			}
   906  			for _, dir := range result.AbsWatchDirs {
   907  				if entries, err, _ := fs.ReadDirectory(dir); err == nil {
   908  					entries.SortedKeys()
   909  				}
   910  			}
   911  
   912  			// Stop now if there was an error
   913  			if didLogError {
   914  				return nil, true, resolver.DebugMeta{}
   915  			}
   916  
   917  			// The "file" namespace is the default for non-external paths, but not
   918  			// for external paths. External paths must explicitly specify the "file"
   919  			// namespace.
   920  			nsFromPlugin := result.Path.Namespace
   921  			if result.Path.Namespace == "" && !result.External {
   922  				result.Path.Namespace = "file"
   923  			}
   924  
   925  			// Otherwise, continue on to the next resolver callback if this one didn't succeed
   926  			if result.Path.Text == "" {
   927  				if result.External {
   928  					result.Path = logger.Path{Text: path}
   929  				} else {
   930  					continue
   931  				}
   932  			}
   933  
   934  			// Paths in the file namespace must be absolute paths
   935  			if result.Path.Namespace == "file" && !fs.IsAbs(result.Path.Text) {
   936  				if nsFromPlugin == "file" {
   937  					log.AddError(&tracker, importPathRange,
   938  						fmt.Sprintf("Plugin %q returned a path in the \"file\" namespace that is not an absolute path: %s", pluginName, result.Path.Text))
   939  				} else {
   940  					log.AddError(&tracker, importPathRange,
   941  						fmt.Sprintf("Plugin %q returned a non-absolute path: %s (set a namespace if this is not a file path)", pluginName, result.Path.Text))
   942  				}
   943  				return nil, true, resolver.DebugMeta{}
   944  			}
   945  
   946  			var sideEffectsData *resolver.SideEffectsData
   947  			if result.IsSideEffectFree {
   948  				sideEffectsData = &resolver.SideEffectsData{
   949  					PluginName: pluginName,
   950  				}
   951  			}
   952  
   953  			return &resolver.ResolveResult{
   954  				PathPair:               resolver.PathPair{Primary: result.Path, IsExternal: result.External},
   955  				PluginData:             result.PluginData,
   956  				PrimarySideEffectsData: sideEffectsData,
   957  			}, false, resolver.DebugMeta{}
   958  		}
   959  	}
   960  
   961  	// Resolve relative to the resolve directory by default. All paths in the
   962  	// "file" namespace automatically have a resolve directory. Loader plugins
   963  	// can also configure a custom resolve directory for files in other namespaces.
   964  	result, debug := res.Resolve(absResolveDir, path, kind)
   965  
   966  	// Warn when the case used for importing differs from the actual file name
   967  	if result != nil && result.DifferentCase != nil && !helpers.IsInsideNodeModules(absResolveDir) {
   968  		diffCase := *result.DifferentCase
   969  		log.AddID(logger.MsgID_Bundler_DifferentPathCase, logger.Warning, &tracker, importPathRange, fmt.Sprintf(
   970  			"Use %q instead of %q to avoid issues with case-sensitive file systems",
   971  			resolver.PrettyPath(fs, logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Actual), Namespace: "file"}),
   972  			resolver.PrettyPath(fs, logger.Path{Text: fs.Join(diffCase.Dir, diffCase.Query), Namespace: "file"}),
   973  		))
   974  	}
   975  
   976  	return result, false, debug
   977  }
   978  
   979  type loaderPluginResult struct {
   980  	pluginData    interface{}
   981  	absResolveDir string
   982  	pluginName    string
   983  	loader        config.Loader
   984  }
   985  
   986  func runOnLoadPlugins(
   987  	plugins []config.Plugin,
   988  	fs fs.FS,
   989  	fsCache *cache.FSCache,
   990  	log logger.Log,
   991  	source *logger.Source,
   992  	importSource *logger.Source,
   993  	importPathRange logger.Range,
   994  	importWith *ast.ImportAssertOrWith,
   995  	pluginData interface{},
   996  	isWatchMode bool,
   997  ) (loaderPluginResult, bool) {
   998  	loaderArgs := config.OnLoadArgs{
   999  		Path:       source.KeyPath,
  1000  		PluginData: pluginData,
  1001  	}
  1002  	tracker := logger.MakeLineColumnTracker(importSource)
  1003  
  1004  	// Apply loader plugins in order until one succeeds
  1005  	for _, plugin := range plugins {
  1006  		for _, onLoad := range plugin.OnLoad {
  1007  			if !config.PluginAppliesToPath(source.KeyPath, onLoad.Filter, onLoad.Namespace) {
  1008  				continue
  1009  			}
  1010  
  1011  			result := onLoad.Callback(loaderArgs)
  1012  			pluginName := result.PluginName
  1013  			if pluginName == "" {
  1014  				pluginName = plugin.Name
  1015  			}
  1016  			didLogError := logPluginMessages(fs, log, pluginName, result.Msgs, result.ThrownError, importSource, importPathRange)
  1017  
  1018  			// Plugins can also provide additional file system paths to watch
  1019  			for _, file := range result.AbsWatchFiles {
  1020  				fsCache.ReadFile(fs, file)
  1021  			}
  1022  			for _, dir := range result.AbsWatchDirs {
  1023  				if entries, err, _ := fs.ReadDirectory(dir); err == nil {
  1024  					entries.SortedKeys()
  1025  				}
  1026  			}
  1027  
  1028  			// Stop now if there was an error
  1029  			if didLogError {
  1030  				if isWatchMode && source.KeyPath.Namespace == "file" {
  1031  					fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking
  1032  				}
  1033  				return loaderPluginResult{}, false
  1034  			}
  1035  
  1036  			// Otherwise, continue on to the next loader if this loader didn't succeed
  1037  			if result.Contents == nil {
  1038  				continue
  1039  			}
  1040  
  1041  			source.Contents = *result.Contents
  1042  			loader := result.Loader
  1043  			if loader == config.LoaderNone {
  1044  				loader = config.LoaderJS
  1045  			}
  1046  			if result.AbsResolveDir == "" && source.KeyPath.Namespace == "file" {
  1047  				result.AbsResolveDir = fs.Dir(source.KeyPath.Text)
  1048  			}
  1049  			if isWatchMode && source.KeyPath.Namespace == "file" {
  1050  				fsCache.ReadFile(fs, source.KeyPath.Text) // Read the file for watch mode tracking
  1051  			}
  1052  			return loaderPluginResult{
  1053  				loader:        loader,
  1054  				absResolveDir: result.AbsResolveDir,
  1055  				pluginName:    pluginName,
  1056  				pluginData:    result.PluginData,
  1057  			}, true
  1058  		}
  1059  	}
  1060  
  1061  	// Reject unsupported import attributes
  1062  	loader := config.LoaderDefault
  1063  	for _, attr := range source.KeyPath.ImportAttributes.DecodeIntoArray() {
  1064  		if attr.Key == "type" {
  1065  			if attr.Value == "json" {
  1066  				loader = config.LoaderWithTypeJSON
  1067  			} else {
  1068  				r := importPathRange
  1069  				if importWith != nil {
  1070  					r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.ValueRange)
  1071  				}
  1072  				log.AddError(&tracker, r, fmt.Sprintf("Importing with a type attribute of %q is not supported", attr.Value))
  1073  				return loaderPluginResult{}, false
  1074  			}
  1075  		} else {
  1076  			r := importPathRange
  1077  			if importWith != nil {
  1078  				r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.KeyRange)
  1079  			}
  1080  			log.AddError(&tracker, r, fmt.Sprintf("Importing with the %q attribute is not supported", attr.Key))
  1081  			return loaderPluginResult{}, false
  1082  		}
  1083  	}
  1084  
  1085  	// Force disabled modules to be empty
  1086  	if source.KeyPath.IsDisabled() {
  1087  		return loaderPluginResult{loader: config.LoaderEmpty}, true
  1088  	}
  1089  
  1090  	// Read normal modules from disk
  1091  	if source.KeyPath.Namespace == "file" {
  1092  		if contents, err, originalError := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil {
  1093  			source.Contents = contents
  1094  			return loaderPluginResult{
  1095  				loader:        loader,
  1096  				absResolveDir: fs.Dir(source.KeyPath.Text),
  1097  			}, true
  1098  		} else {
  1099  			if log.Level <= logger.LevelDebug && originalError != nil {
  1100  				log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read file %q: %s", source.KeyPath.Text, originalError.Error()))
  1101  			}
  1102  			if err == syscall.ENOENT {
  1103  				log.AddError(&tracker, importPathRange,
  1104  					fmt.Sprintf("Could not read from file: %s", source.KeyPath.Text))
  1105  				return loaderPluginResult{}, false
  1106  			} else {
  1107  				log.AddError(&tracker, importPathRange,
  1108  					fmt.Sprintf("Cannot read file %q: %s", resolver.PrettyPath(fs, source.KeyPath), err.Error()))
  1109  				return loaderPluginResult{}, false
  1110  			}
  1111  		}
  1112  	}
  1113  
  1114  	// Native support for data URLs, which node also supports natively:
  1115  	// https://nodejs.org/docs/latest/api/esm.html#esm_data_imports
  1116  	if source.KeyPath.Namespace == "dataurl" {
  1117  		if parsed, ok := resolver.ParseDataURL(source.KeyPath.Text); ok {
  1118  			if contents, err := parsed.DecodeData(); err != nil {
  1119  				log.AddError(&tracker, importPathRange,
  1120  					fmt.Sprintf("Could not load data URL: %s", err.Error()))
  1121  				return loaderPluginResult{loader: config.LoaderNone}, true
  1122  			} else {
  1123  				source.Contents = contents
  1124  				if loader != config.LoaderDefault {
  1125  					return loaderPluginResult{loader: loader}, true
  1126  				}
  1127  				if mimeType := parsed.DecodeMIMEType(); mimeType != resolver.MIMETypeUnsupported {
  1128  					switch mimeType {
  1129  					case resolver.MIMETypeTextCSS:
  1130  						return loaderPluginResult{loader: config.LoaderCSS}, true
  1131  					case resolver.MIMETypeTextJavaScript:
  1132  						return loaderPluginResult{loader: config.LoaderJS}, true
  1133  					case resolver.MIMETypeApplicationJSON:
  1134  						return loaderPluginResult{loader: config.LoaderJSON}, true
  1135  					}
  1136  				}
  1137  			}
  1138  		}
  1139  	}
  1140  
  1141  	// Otherwise, fail to load the path
  1142  	return loaderPluginResult{loader: config.LoaderNone}, true
  1143  }
  1144  
  1145  func loaderFromFileExtension(extensionToLoader map[string]config.Loader, base string) config.Loader {
  1146  	// Pick the loader with the longest matching extension. So if there's an
  1147  	// extension for ".css" and for ".module.css", we want to match the one for
  1148  	// ".module.css" before the one for ".css".
  1149  	if i := strings.IndexByte(base, '.'); i != -1 {
  1150  		for {
  1151  			if loader, ok := extensionToLoader[base[i:]]; ok {
  1152  				return loader
  1153  			}
  1154  			base = base[i+1:]
  1155  			i = strings.IndexByte(base, '.')
  1156  			if i == -1 {
  1157  				break
  1158  			}
  1159  		}
  1160  	} else {
  1161  		// If there's no extension, explicitly check for an extensionless loader
  1162  		if loader, ok := extensionToLoader[""]; ok {
  1163  			return loader
  1164  		}
  1165  	}
  1166  	return config.LoaderNone
  1167  }
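// Editorial sketch (not part of the original file): longest-suffix matching in
// action, assuming a hypothetical extension map.
//
//	m := map[string]config.Loader{
//		".css":        config.LoaderCSS,
//		".module.css": config.LoaderLocalCSS,
//	}
//	loaderFromFileExtension(m, "app.module.css") // LoaderLocalCSS (longest match wins)
//	loaderFromFileExtension(m, "app.css")        // LoaderCSS
//	loaderFromFileExtension(m, "Makefile")       // LoaderNone unless m[""] is set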
  1168  
  1169  // Identify the path by its lowercase absolute path name with Windows-specific
  1170  // backslashes replaced by standard forward slashes. This should hopefully
  1171  // avoid path issues on Windows where multiple different paths can refer to
  1172  // the same underlying file.
  1173  func canonicalFileSystemPathForWindows(absPath string) string {
  1174  	return strings.ReplaceAll(strings.ToLower(absPath), "\\", "/")
  1175  }
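// Editorial sketch (not part of the original file):
//
//	canonicalFileSystemPathForWindows(`C:\Users\Me\App.TS`) // "c:/users/me/app.ts"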
  1176  
  1177  func HashForFileName(hashBytes []byte) string {
  1178  	return base32.StdEncoding.EncodeToString(hashBytes)[:8]
  1179  }
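// Editorial note (not part of the original file): 8 base32 characters encode
// the first 40 bits of the hash, e.g.:
//
//	HashForFileName(make([]byte, 5)) // "AAAAAAAA" (8 base32 chars = 40 bits)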
  1180  
  1181  type scanner struct {
  1182  	log             logger.Log
  1183  	fs              fs.FS
  1184  	res             *resolver.Resolver
  1185  	caches          *cache.CacheSet
  1186  	timer           *helpers.Timer
  1187  	uniqueKeyPrefix string
  1188  
  1189  	// These are not guarded by a mutex because they are only ever modified by a
  1190  	// single thread. Note that not all results in the "results" array are necessarily
  1191  	// valid. Make sure to check the "ok" flag before using them.
  1192  	results       []parseResult
  1193  	visited       map[logger.Path]visitedFile
  1194  	resultChannel chan parseResult
  1195  
  1196  	options config.Options
  1197  
  1198  	// Also not guarded by a mutex for the same reason
  1199  	remaining int
  1200  }
  1201  
  1202  type visitedFile struct {
  1203  	sourceIndex uint32
  1204  }
  1205  
  1206  type EntryPoint struct {
  1207  	InputPath                string
  1208  	OutputPath               string
  1209  	InputPathInFileNamespace bool
  1210  }
  1211  
  1212  func generateUniqueKeyPrefix() (string, error) {
  1213  	var data [12]byte
  1214  	rand.Seed(time.Now().UnixNano())
  1215  	if _, err := rand.Read(data[:]); err != nil {
  1216  		return "", err
  1217  	}
  1218  
  1219  	// The encoded result is 16 bytes and shouldn't generate escape characters when put into strings
  1220  	return base64.URLEncoding.EncodeToString(data[:]), nil
  1221  }
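// Editorial note (not part of the original file): 12 random bytes encode to
// exactly 16 URL-safe base64 characters with no "=" padding (12 is a multiple
// of 3), which is why the comment above can promise a fixed 16-byte prefix:
//
//	len(base64.URLEncoding.EncodeToString(make([]byte, 12))) // 16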
  1222  
  1223  // This creates a bundle by scanning over the whole module graph starting from
  1224  // the entry points until all modules are reached. Each module has some number
  1225  // of import paths which are resolved to module identifiers (i.e. "onResolve"
  1226  // in the plugin API). Each unique module identifier is loaded once (i.e.
  1227  // "onLoad" in the plugin API).
  1228  func ScanBundle(
  1229  	call config.APICall,
  1230  	log logger.Log,
  1231  	fs fs.FS,
  1232  	caches *cache.CacheSet,
  1233  	entryPoints []EntryPoint,
  1234  	options config.Options,
  1235  	timer *helpers.Timer,
  1236  ) Bundle {
  1237  	timer.Begin("Scan phase")
  1238  	defer timer.End("Scan phase")
  1239  
  1240  	applyOptionDefaults(&options)
  1241  
  1242  	// Run "onStart" plugins in parallel. IMPORTANT: We always need to run all
  1243  	// "onStart" callbacks even when the build is cancelled, because plugins may
  1244  	// rely on invariants that are started in "onStart" and ended in "onEnd".
  1245  	// This works because "onEnd" callbacks are always run as well.
  1246  	timer.Begin("On-start callbacks")
  1247  	onStartWaitGroup := sync.WaitGroup{}
  1248  	for _, plugin := range options.Plugins {
  1249  		for _, onStart := range plugin.OnStart {
  1250  			onStartWaitGroup.Add(1)
  1251  			go func(plugin config.Plugin, onStart config.OnStart) {
  1252  				result := onStart.Callback()
  1253  				logPluginMessages(fs, log, plugin.Name, result.Msgs, result.ThrownError, nil, logger.Range{})
  1254  				onStartWaitGroup.Done()
  1255  			}(plugin, onStart)
  1256  		}
  1257  	}
  1258  
  1259  	// Each bundling operation gets a separate unique key
  1260  	uniqueKeyPrefix, err := generateUniqueKeyPrefix()
  1261  	if err != nil {
  1262  		log.AddError(nil, logger.Range{}, fmt.Sprintf("Failed to read from randomness source: %s", err.Error()))
  1263  	}
  1264  
  1265  	// This may mutate "options" by the "tsconfig.json" override settings
  1266  	res := resolver.NewResolver(call, fs, log, caches, &options)
  1267  
  1268  	s := scanner{
  1269  		log:             log,
  1270  		fs:              fs,
  1271  		res:             res,
  1272  		caches:          caches,
  1273  		options:         options,
  1274  		timer:           timer,
  1275  		results:         make([]parseResult, 0, caches.SourceIndexCache.LenHint()),
  1276  		visited:         make(map[logger.Path]visitedFile),
  1277  		resultChannel:   make(chan parseResult),
  1278  		uniqueKeyPrefix: uniqueKeyPrefix,
  1279  	}
  1280  
  1281  	// Always start by parsing the runtime file
  1282  	s.results = append(s.results, parseResult{})
  1283  	s.remaining++
  1284  	go func() {
  1285  		source, ast, ok := globalRuntimeCache.parseRuntime(&options)
  1286  		s.resultChannel <- parseResult{
  1287  			file: scannerFile{
  1288  				inputFile: graph.InputFile{
  1289  					Source: source,
  1290  					Repr: &graph.JSRepr{
  1291  						AST: ast,
  1292  					},
  1293  					OmitFromSourceMapsAndMetafile: true,
  1294  				},
  1295  			},
  1296  			ok: ok,
  1297  		}
  1298  	}()
  1299  
  1300  	// Wait for all "onStart" plugins here before continuing. People sometimes run
  1301  	// setup code in "onStart" that "onLoad" expects to be able to use without
  1302  	// "onLoad" needing to block on the completion of their "onStart" callback.
  1303  	//
  1304  	// We want to enable this:
  1305  	//
  1306  	//   let plugin = {
  1307  	//     name: 'example',
  1308  	//     setup(build) {
  1309  	//       let started = false
  1310  	//       build.onStart(() => started = true)
  1311  	//       build.onLoad({ filter: /.*/ }, () => {
  1312  	//         assert(started === true)
  1313  	//       })
  1314  	//     },
  1315  	//   }
  1316  	//
  1317  	// without people having to write something like this:
  1318  	//
  1319  	//   let plugin = {
  1320  	//     name: 'example',
  1321  	//     setup(build) {
  1322  	//       let started = {}
  1323  	//       started.promise = new Promise(resolve => {
  1324  	//         started.resolve = resolve
  1325  	//       })
  1326  	//       build.onStart(() => {
  1327  	//         started.resolve(true)
  1328  	//       })
  1329  	//       build.onLoad({ filter: /.*/ }, async () => {
  1330  	//         assert(await started.promise === true)
  1331  	//       })
  1332  	//     },
  1333  	//   }
  1334  	//
  1335  	onStartWaitGroup.Wait()
  1336  	timer.End("On-start callbacks")
  1337  
  1338  	// We can check the cancel flag now that all "onStart" callbacks are done
  1339  	if options.CancelFlag.DidCancel() {
  1340  		return Bundle{options: options}
  1341  	}
  1342  
  1343  	s.preprocessInjectedFiles()
  1344  
  1345  	if options.CancelFlag.DidCancel() {
  1346  		return Bundle{options: options}
  1347  	}
  1348  
  1349  	entryPointMeta := s.addEntryPoints(entryPoints)
  1350  
  1351  	if options.CancelFlag.DidCancel() {
  1352  		return Bundle{options: options}
  1353  	}
  1354  
  1355  	s.scanAllDependencies()
  1356  
  1357  	if options.CancelFlag.DidCancel() {
  1358  		return Bundle{options: options}
  1359  	}
  1360  
  1361  	files := s.processScannedFiles(entryPointMeta)
  1362  
  1363  	if options.CancelFlag.DidCancel() {
  1364  		return Bundle{options: options}
  1365  	}
  1366  
  1367  	return Bundle{
  1368  		fs:              fs,
  1369  		res:             s.res,
  1370  		files:           files,
  1371  		entryPoints:     entryPointMeta,
  1372  		uniqueKeyPrefix: uniqueKeyPrefix,
  1373  		options:         s.options,
  1374  	}
  1375  }
  1376  
  1377  type inputKind uint8
  1378  
  1379  const (
  1380  	inputKindNormal inputKind = iota
  1381  	inputKindEntryPoint
  1382  	inputKindStdin
  1383  )
  1384  
  1385  // This returns the source index of the resulting file
  1386  func (s *scanner) maybeParseFile(
  1387  	resolveResult resolver.ResolveResult,
  1388  	prettyPath string,
  1389  	importSource *logger.Source,
  1390  	importPathRange logger.Range,
  1391  	importWith *ast.ImportAssertOrWith,
  1392  	kind inputKind,
  1393  	inject chan config.InjectedFile,
  1394  ) uint32 {
  1395  	path := resolveResult.PathPair.Primary
  1396  	visitedKey := path
  1397  	if visitedKey.Namespace == "file" {
  1398  		visitedKey.Text = canonicalFileSystemPathForWindows(visitedKey.Text)
  1399  	}
  1400  
  1401  	// Only parse a given file path once
  1402  	visited, ok := s.visited[visitedKey]
  1403  	if ok {
  1404  		if inject != nil {
  1405  			inject <- config.InjectedFile{}
  1406  		}
  1407  		return visited.sourceIndex
  1408  	}
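
        	// For illustration (hypothetical paths): on Windows, where the file
        	// system is case-insensitive, imports of "C:\src\Util.js" and
        	// "c:\src\util.js" canonicalize to the same visited key, so the file
        	// is parsed only once and both importers share one source index.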
  1409  
  1410  	visited = visitedFile{
  1411  		sourceIndex: s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal),
  1412  	}
  1413  	s.visited[visitedKey] = visited
  1414  	s.remaining++
  1415  	optionsClone := s.options
  1416  	if kind != inputKindStdin {
  1417  		optionsClone.Stdin = nil
  1418  	}
  1419  
  1420  	// Allow certain properties to be overridden by "tsconfig.json"
  1421  	resolveResult.TSConfigJSX.ApplyTo(&optionsClone.JSX)
  1422  	if resolveResult.TSConfig != nil {
  1423  		optionsClone.TS.Config = *resolveResult.TSConfig
  1424  	}
  1425  	if resolveResult.TSAlwaysStrict != nil {
  1426  		optionsClone.TSAlwaysStrict = resolveResult.TSAlwaysStrict
  1427  	}
  1428  
  1429  	// Set the module type preference using node's module type rules
  1430  	if strings.HasSuffix(path.Text, ".mjs") {
  1431  		optionsClone.ModuleTypeData.Type = js_ast.ModuleESM_MJS
  1432  	} else if strings.HasSuffix(path.Text, ".mts") {
  1433  		optionsClone.ModuleTypeData.Type = js_ast.ModuleESM_MTS
  1434  	} else if strings.HasSuffix(path.Text, ".cjs") {
  1435  		optionsClone.ModuleTypeData.Type = js_ast.ModuleCommonJS_CJS
  1436  	} else if strings.HasSuffix(path.Text, ".cts") {
  1437  		optionsClone.ModuleTypeData.Type = js_ast.ModuleCommonJS_CTS
  1438  	} else if strings.HasSuffix(path.Text, ".js") || strings.HasSuffix(path.Text, ".jsx") ||
  1439  		strings.HasSuffix(path.Text, ".ts") || strings.HasSuffix(path.Text, ".tsx") {
  1440  		optionsClone.ModuleTypeData = resolveResult.ModuleTypeData
  1441  	} else {
  1442  		// The "type" setting in "package.json" only applies to ".js" files
  1443  		optionsClone.ModuleTypeData.Type = js_ast.ModuleUnknown
  1444  	}
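
        	// To sketch the rules above (the ".js"-like group defers to the
        	// enclosing "package.json"):
        	//
        	//   ".mjs", ".mts"               => always ESM
        	//   ".cjs", ".cts"               => always CommonJS
        	//   ".js", ".jsx", ".ts", ".tsx" => whatever the "type" field says
        	//   anything else                => ModuleUnknown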
  1445  
  1446  	// Enable bundling for injected files so we always do tree shaking. We
  1447  	// never want to include unnecessary code from injected files since they
  1448  	// are essentially bundled. However, if we do this we should skip the
  1449  	// resolving step when we're not bundling. It'd be strange to get
  1450  	// resolution errors when the top-level bundling controls are disabled.
  1451  	skipResolve := false
  1452  	if inject != nil && optionsClone.Mode != config.ModeBundle {
  1453  		optionsClone.Mode = config.ModeBundle
  1454  		skipResolve = true
  1455  	}
  1456  
  1457  	// Special-case pretty-printed paths for data URLs
  1458  	if path.Namespace == "dataurl" {
  1459  		if _, ok := resolver.ParseDataURL(path.Text); ok {
  1460  			prettyPath = path.Text
  1461  			if len(prettyPath) > 65 {
  1462  				prettyPath = prettyPath[:65]
  1463  			}
  1464  			prettyPath = strings.ReplaceAll(prettyPath, "\n", "\\n")
  1465  			if len(prettyPath) > 64 {
  1466  				prettyPath = prettyPath[:64] + "..."
  1467  			}
  1468  			prettyPath = fmt.Sprintf("<%s>", prettyPath)
  1469  		}
  1470  	}
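
        	// For illustration (a hypothetical import): importing a long
        	// "data:text/javascript,..." URL prints as a truncated form such as
        	//
        	//   <data:text/javascript,console.log("some very long payload")...>
        	//
        	// so log messages stay readable instead of repeating the whole URL.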
  1471  
  1472  	var sideEffects graph.SideEffects
  1473  	if resolveResult.PrimarySideEffectsData != nil {
  1474  		sideEffects.Kind = graph.NoSideEffects_PackageJSON
  1475  		sideEffects.Data = resolveResult.PrimarySideEffectsData
  1476  	}
  1477  
  1478  	go parseFile(parseArgs{
  1479  		fs:              s.fs,
  1480  		log:             s.log,
  1481  		res:             s.res,
  1482  		caches:          s.caches,
  1483  		keyPath:         path,
  1484  		prettyPath:      prettyPath,
  1485  		sourceIndex:     visited.sourceIndex,
  1486  		importSource:    importSource,
  1487  		sideEffects:     sideEffects,
  1488  		importPathRange: importPathRange,
  1489  		importWith:      importWith,
  1490  		pluginData:      resolveResult.PluginData,
  1491  		options:         optionsClone,
  1492  		results:         s.resultChannel,
  1493  		inject:          inject,
  1494  		skipResolve:     skipResolve,
  1495  		uniqueKeyPrefix: s.uniqueKeyPrefix,
  1496  	})
  1497  
  1498  	return visited.sourceIndex
  1499  }
  1500  
  1501  func (s *scanner) allocateSourceIndex(path logger.Path, kind cache.SourceIndexKind) uint32 {
  1502  	// Allocate a source index using the shared source index cache so that
  1503  	// subsequent builds reuse the same source index and therefore use the
  1504  	// cached parse results for increased speed.
  1505  	sourceIndex := s.caches.SourceIndexCache.Get(path, kind)
  1506  
  1507  	// Grow the results array to fit this source index
  1508  	if newLen := int(sourceIndex) + 1; len(s.results) < newLen {
  1509  		// Reallocate to a bigger array
  1510  		if cap(s.results) < newLen {
  1511  			s.results = append(make([]parseResult, 0, 2*newLen), s.results...)
  1512  		}
  1513  
  1514  		// Grow in place
  1515  		s.results = s.results[:newLen]
  1516  	}
  1517  
  1518  	return sourceIndex
  1519  }
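
        // A minimal sketch of the growth strategy above, with esbuild's types
        // swapped for plain ints (an illustration only, not used by the build):
        //
        //	func growTo(results []int, newLen int) []int {
        //		if cap(results) < newLen {
        //			// Double the capacity so repeated growth stays amortized
        //			// linear, copying the existing entries across
        //			results = append(make([]int, 0, 2*newLen), results...)
        //		}
        //		// Slicing up to the capacity extends the length in place
        //		return results[:newLen]
        //	}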
  1520  
  1521  func (s *scanner) allocateGlobSourceIndex(parentSourceIndex uint32, globIndex uint32) uint32 {
  1522  	// Allocate a source index using the shared source index cache so that
  1523  	// subsequent builds reuse the same source index and therefore use the
  1524  	// cached parse results for increased speed.
  1525  	sourceIndex := s.caches.SourceIndexCache.GetGlob(parentSourceIndex, globIndex)
  1526  
  1527  	// Grow the results array to fit this source index
  1528  	if newLen := int(sourceIndex) + 1; len(s.results) < newLen {
  1529  		// Reallocate to a bigger array
  1530  		if cap(s.results) < newLen {
  1531  			s.results = append(make([]parseResult, 0, 2*newLen), s.results...)
  1532  		}
  1533  
  1534  		// Grow in place
  1535  		s.results = s.results[:newLen]
  1536  	}
  1537  
  1538  	return sourceIndex
  1539  }
  1540  
  1541  func (s *scanner) preprocessInjectedFiles() {
  1542  	s.timer.Begin("Preprocess injected files")
  1543  	defer s.timer.End("Preprocess injected files")
  1544  
  1545  	injectedFiles := make([]config.InjectedFile, 0, len(s.options.InjectedDefines)+len(s.options.InjectPaths))
  1546  
  1547  	// These are virtual paths that are generated for compound "--define" values.
  1548  	// They are special-cased and are not available for plugins to intercept.
  1549  	for _, define := range s.options.InjectedDefines {
  1550  		// These should be unique by construction so no need to check for collisions
  1551  		visitedKey := logger.Path{Text: fmt.Sprintf("<define:%s>", define.Name)}
  1552  		sourceIndex := s.allocateSourceIndex(visitedKey, cache.SourceIndexNormal)
  1553  		s.visited[visitedKey] = visitedFile{sourceIndex: sourceIndex}
  1554  		source := logger.Source{
  1555  			Index:          sourceIndex,
  1556  			KeyPath:        visitedKey,
  1557  			PrettyPath:     resolver.PrettyPath(s.fs, visitedKey),
  1558  			IdentifierName: js_ast.EnsureValidIdentifier(visitedKey.Text),
  1559  		}
  1560  
  1561  		// The first "len(InjectedDefines)" injected files intentionally line up
  1562  		// with the injected defines by index. The index will be used to import
  1563  		// references to them in the parser.
  1564  		injectedFiles = append(injectedFiles, config.InjectedFile{
  1565  			Source:     source,
  1566  			DefineName: define.Name,
  1567  		})
  1568  
  1569  		// Generate the file inline here since it has already been parsed
  1570  		expr := js_ast.Expr{Data: define.Data}
  1571  		ast := js_parser.LazyExportAST(s.log, source, js_parser.OptionsFromConfig(&s.options), expr, "")
  1572  		result := parseResult{
  1573  			ok: true,
  1574  			file: scannerFile{
  1575  				inputFile: graph.InputFile{
  1576  					Source: source,
  1577  					Repr:   &graph.JSRepr{AST: ast},
  1578  					Loader: config.LoaderJSON,
  1579  					SideEffects: graph.SideEffects{
  1580  						Kind: graph.NoSideEffects_PureData,
  1581  					},
  1582  				},
  1583  			},
  1584  		}
  1585  
  1586  		// Append to the channel on a goroutine in case it blocks due to capacity
  1587  		s.remaining++
  1588  		go func() { s.resultChannel <- result }()
  1589  	}
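
        	// For illustration (a hypothetical invocation): a compound value like
        	//
        	//   esbuild app.js --define:CONFIG='{"debug":true}'
        	//
        	// can't be substituted as a single token, so it becomes a virtual
        	// module named "<define:CONFIG>" here, and the parser rewrites
        	// references to CONFIG into imports of that module by index.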
  1590  
  1591  	// Add user-specified injected files. Run resolver plugins on these files
  1592  	// so plugins can alter where they resolve to. These are run in parallel in
  1593  	// case any of these plugins block.
  1594  	injectResolveResults := make([]*resolver.ResolveResult, len(s.options.InjectPaths))
  1595  	injectAbsResolveDir := s.fs.Cwd()
  1596  	injectResolveWaitGroup := sync.WaitGroup{}
  1597  	injectResolveWaitGroup.Add(len(s.options.InjectPaths))
  1598  	for i, importPath := range s.options.InjectPaths {
  1599  		go func(i int, importPath string) {
  1600  			var importer logger.Path
  1601  
  1602  			// Add a leading "./" if it's missing, similar to entry points
  1603  			absPath := importPath
  1604  			if !s.fs.IsAbs(absPath) {
  1605  				absPath = s.fs.Join(injectAbsResolveDir, absPath)
  1606  			}
  1607  			dir := s.fs.Dir(absPath)
  1608  			base := s.fs.Base(absPath)
  1609  			if entries, err, originalError := s.fs.ReadDirectory(dir); err == nil {
  1610  				if entry, _ := entries.Get(base); entry != nil && entry.Kind(s.fs) == fs.FileEntry {
  1611  					importer.Namespace = "file"
  1612  					if !s.fs.IsAbs(importPath) && resolver.IsPackagePath(importPath) {
  1613  						importPath = "./" + importPath
  1614  					}
  1615  				}
  1616  			} else if s.log.Level <= logger.LevelDebug && originalError != nil {
  1617  				s.log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read directory %q: %s", absPath, originalError.Error()))
  1618  			}
  1619  
  1620  			// Run the resolver and log an error if the path couldn't be resolved
  1621  			resolveResult, didLogError, debug := RunOnResolvePlugins(
  1622  				s.options.Plugins,
  1623  				s.res,
  1624  				s.log,
  1625  				s.fs,
  1626  				&s.caches.FSCache,
  1627  				nil,
  1628  				logger.Range{},
  1629  				importer,
  1630  				importPath,
  1631  				logger.ImportAttributes{},
  1632  				ast.ImportEntryPoint,
  1633  				injectAbsResolveDir,
  1634  				nil,
  1635  			)
  1636  			if resolveResult != nil {
  1637  				if resolveResult.PathPair.IsExternal {
  1638  					s.log.AddError(nil, logger.Range{}, fmt.Sprintf("The injected path %q cannot be marked as external", importPath))
  1639  				} else {
  1640  					injectResolveResults[i] = resolveResult
  1641  				}
  1642  			} else if !didLogError {
  1643  				debug.LogErrorMsg(s.log, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", importPath), "", nil)
  1644  			}
  1645  			injectResolveWaitGroup.Done()
  1646  		}(i, importPath)
  1647  	}
  1648  	injectResolveWaitGroup.Wait()
  1649  
  1650  	if s.options.CancelFlag.DidCancel() {
  1651  		return
  1652  	}
  1653  
  1654  	// Parse all injected files that were resolved successfully
  1655  	results := make([]config.InjectedFile, len(s.options.InjectPaths))
  1656  	j := 0
  1657  	var injectWaitGroup sync.WaitGroup
  1658  	for _, resolveResult := range injectResolveResults {
  1659  		if resolveResult != nil {
  1660  			channel := make(chan config.InjectedFile, 1)
  1661  			s.maybeParseFile(*resolveResult, resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary), nil, logger.Range{}, nil, inputKindNormal, channel)
  1662  			injectWaitGroup.Add(1)
  1663  
  1664  			// Wait for the results in parallel. The results slice was preallocated to
  1665  			// its full length, so it is never reallocated while these goroutines write into it.
  1666  			go func(i int) {
  1667  				results[i] = <-channel
  1668  				injectWaitGroup.Done()
  1669  			}(j)
  1670  			j++
  1671  		}
  1672  	}
  1673  	injectWaitGroup.Wait()
  1674  	injectedFiles = append(injectedFiles, results[:j]...)
  1675  
  1676  	// It's safe to mutate the options object to add the injected files here
  1677  	// because there aren't any concurrent "parseFile" goroutines at this point.
  1678  	// The only ones that were created by this point are the ones we created
  1679  	// above, and we've already waited for all of them to finish using the
  1680  	// "options" object.
  1681  	s.options.InjectedFiles = injectedFiles
  1682  }
  1683  
  1684  func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint {
  1685  	s.timer.Begin("Add entry points")
  1686  	defer s.timer.End("Add entry points")
  1687  
  1688  	// Reserve a slot for each entry point
  1689  	entryMetas := make([]graph.EntryPoint, 0, len(entryPoints)+1)
  1690  
  1691  	// Treat stdin as an extra entry point
  1692  	if stdin := s.options.Stdin; stdin != nil {
  1693  		stdinPath := logger.Path{Text: "<stdin>"}
  1694  		if stdin.SourceFile != "" {
  1695  			if stdin.AbsResolveDir == "" {
  1696  				stdinPath = logger.Path{Text: stdin.SourceFile}
  1697  			} else if s.fs.IsAbs(stdin.SourceFile) {
  1698  				stdinPath = logger.Path{Text: stdin.SourceFile, Namespace: "file"}
  1699  			} else {
  1700  				stdinPath = logger.Path{Text: s.fs.Join(stdin.AbsResolveDir, stdin.SourceFile), Namespace: "file"}
  1701  			}
  1702  		}
  1703  		resolveResult := resolver.ResolveResult{PathPair: resolver.PathPair{Primary: stdinPath}}
  1704  		sourceIndex := s.maybeParseFile(resolveResult, resolver.PrettyPath(s.fs, stdinPath), nil, logger.Range{}, nil, inputKindStdin, nil)
  1705  		entryMetas = append(entryMetas, graph.EntryPoint{
  1706  			OutputPath:  "stdin",
  1707  			SourceIndex: sourceIndex,
  1708  		})
  1709  	}
  1710  
  1711  	if s.options.CancelFlag.DidCancel() {
  1712  		return nil
  1713  	}
  1714  
  1715  	// Check each entry point ahead of time to see if it's a real file
  1716  	entryPointAbsResolveDir := s.fs.Cwd()
  1717  	for i := range entryPoints {
  1718  		entryPoint := &entryPoints[i]
  1719  		absPath := entryPoint.InputPath
  1720  		if strings.ContainsRune(absPath, '*') {
  1721  			continue // Ignore glob patterns
  1722  		}
  1723  		if !s.fs.IsAbs(absPath) {
  1724  			absPath = s.fs.Join(entryPointAbsResolveDir, absPath)
  1725  		}
  1726  		dir := s.fs.Dir(absPath)
  1727  		base := s.fs.Base(absPath)
  1728  		if entries, err, originalError := s.fs.ReadDirectory(dir); err == nil {
  1729  			if entry, _ := entries.Get(base); entry != nil && entry.Kind(s.fs) == fs.FileEntry {
  1730  				entryPoint.InputPathInFileNamespace = true
  1731  
  1732  				// Entry point paths without a leading "./" are interpreted as package
  1733  				// paths. This happens because they go through general path resolution
  1734  				// like all other import paths so that plugins can run on them. Requiring
  1735  				// a leading "./" for a relative path simplifies writing plugins because
  1736  				// entry points aren't a special case.
  1737  				//
  1738  				// However, requiring a leading "./" also breaks backward compatibility
  1739  				// and makes working with the CLI more difficult. So attempt to insert
  1740  				// "./" automatically when needed. We don't want to unconditionally insert
  1741  				// a leading "./" because the path may not be a file system path. For
  1742  				// example, it may be a URL. So only insert a leading "./" when the path
  1743  				// is an exact match for an existing file.
  1744  				if !s.fs.IsAbs(entryPoint.InputPath) && resolver.IsPackagePath(entryPoint.InputPath) {
  1745  					entryPoint.InputPath = "./" + entryPoint.InputPath
  1746  				}
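
        				// For illustration (hypothetical inputs): "esbuild src/app.ts"
        				// becomes "./src/app.ts" because "src/app.ts" names a real file,
        				// while inputs like "some-pkg" that don't match a file on disk
        				// are left alone and resolve as package-style paths.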
  1747  			}
  1748  		} else if s.log.Level <= logger.LevelDebug && originalError != nil {
  1749  			s.log.AddID(logger.MsgID_None, logger.Debug, nil, logger.Range{}, fmt.Sprintf("Failed to read directory %q: %s", absPath, originalError.Error()))
  1750  		}
  1751  	}
  1752  
  1753  	if s.options.CancelFlag.DidCancel() {
  1754  		return nil
  1755  	}
  1756  
  1757  	// Add any remaining entry points. Run resolver plugins on these entry points
  1758  	// so plugins can alter where they resolve to. These are run in parallel in
  1759  	// case any of these plugins block.
  1760  	type entryPointInfo struct {
  1761  		results []resolver.ResolveResult
  1762  		isGlob  bool
  1763  	}
  1764  	entryPointInfos := make([]entryPointInfo, len(entryPoints))
  1765  	entryPointWaitGroup := sync.WaitGroup{}
  1766  	entryPointWaitGroup.Add(len(entryPoints))
  1767  	for i, entryPoint := range entryPoints {
  1768  		go func(i int, entryPoint EntryPoint) {
  1769  			var importer logger.Path
  1770  			if entryPoint.InputPathInFileNamespace {
  1771  				importer.Namespace = "file"
  1772  			}
  1773  
  1774  			// Special-case glob patterns here
  1775  			if strings.ContainsRune(entryPoint.InputPath, '*') {
  1776  				if pattern := helpers.ParseGlobPattern(entryPoint.InputPath); len(pattern) > 1 {
  1777  					prettyPattern := fmt.Sprintf("%q", entryPoint.InputPath)
  1778  					if results, msg := s.res.ResolveGlob(entryPointAbsResolveDir, pattern, ast.ImportEntryPoint, prettyPattern); results != nil {
  1779  						keys := make([]string, 0, len(results))
  1780  						for key := range results {
  1781  							keys = append(keys, key)
  1782  						}
  1783  						sort.Strings(keys)
  1784  						info := entryPointInfo{isGlob: true}
  1785  						for _, key := range keys {
  1786  							info.results = append(info.results, results[key])
  1787  						}
  1788  						entryPointInfos[i] = info
  1789  						if msg != nil {
  1790  							s.log.AddID(msg.ID, msg.Kind, nil, logger.Range{}, msg.Data.Text)
  1791  						}
  1792  					} else {
  1793  						s.log.AddError(nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", entryPoint.InputPath))
  1794  					}
  1795  					entryPointWaitGroup.Done()
  1796  					return
  1797  				}
  1798  			}
  1799  
  1800  			// Run the resolver and log an error if the path couldn't be resolved
  1801  			resolveResult, didLogError, debug := RunOnResolvePlugins(
  1802  				s.options.Plugins,
  1803  				s.res,
  1804  				s.log,
  1805  				s.fs,
  1806  				&s.caches.FSCache,
  1807  				nil,
  1808  				logger.Range{},
  1809  				importer,
  1810  				entryPoint.InputPath,
  1811  				logger.ImportAttributes{},
  1812  				ast.ImportEntryPoint,
  1813  				entryPointAbsResolveDir,
  1814  				nil,
  1815  			)
  1816  			if resolveResult != nil {
  1817  				if resolveResult.PathPair.IsExternal {
  1818  					s.log.AddError(nil, logger.Range{}, fmt.Sprintf("The entry point %q cannot be marked as external", entryPoint.InputPath))
  1819  				} else {
  1820  					entryPointInfos[i] = entryPointInfo{results: []resolver.ResolveResult{*resolveResult}}
  1821  				}
  1822  			} else if !didLogError {
  1823  				var notes []logger.MsgData
  1824  				if !s.fs.IsAbs(entryPoint.InputPath) {
  1825  					if query, _ := s.res.ProbeResolvePackageAsRelative(entryPointAbsResolveDir, entryPoint.InputPath, ast.ImportEntryPoint); query != nil {
  1826  						notes = append(notes, logger.MsgData{
  1827  							Text: fmt.Sprintf("Use the relative path %q to reference the file %q. "+
  1828  								"Without the leading \"./\", the path %q is being interpreted as a package path instead.",
  1829  								"./"+entryPoint.InputPath, resolver.PrettyPath(s.fs, query.PathPair.Primary), entryPoint.InputPath),
  1830  						})
  1831  					}
  1832  				}
  1833  				debug.LogErrorMsg(s.log, nil, logger.Range{}, fmt.Sprintf("Could not resolve %q", entryPoint.InputPath), "", notes)
  1834  			}
  1835  			entryPointWaitGroup.Done()
  1836  		}(i, entryPoint)
  1837  	}
  1838  	entryPointWaitGroup.Wait()
  1839  
  1840  	if s.options.CancelFlag.DidCancel() {
  1841  		return nil
  1842  	}
  1843  
  1844  	// Parse all entry points that were resolved successfully
  1845  	for i, info := range entryPointInfos {
  1846  		if info.results == nil {
  1847  			continue
  1848  		}
  1849  
  1850  		for _, resolveResult := range info.results {
  1851  			prettyPath := resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary)
  1852  			sourceIndex := s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil)
  1853  			outputPath := entryPoints[i].OutputPath
  1854  			outputPathWasAutoGenerated := false
  1855  
  1856  			// If the output path is missing, automatically generate one from the input path
  1857  			if outputPath == "" {
  1858  				if info.isGlob {
  1859  					outputPath = prettyPath
  1860  				} else {
  1861  					outputPath = entryPoints[i].InputPath
  1862  				}
  1863  				windowsVolumeLabel := ""
  1864  
  1865  				// The ":" character is invalid in file paths on Windows except when
  1866  				// it's used as a volume separator. Special-case that here so volume
  1867  				// labels don't break on Windows.
  1868  				if s.fs.IsAbs(outputPath) && len(outputPath) >= 3 && outputPath[1] == ':' {
  1869  					if c := outputPath[0]; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
  1870  						if c := outputPath[2]; c == '/' || c == '\\' {
  1871  							windowsVolumeLabel = outputPath[:3]
  1872  							outputPath = outputPath[3:]
  1873  						}
  1874  					}
  1875  				}
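
        				// For illustration (a hypothetical Windows path): "C:\proj\app.ts"
        				// is split into the volume label "C:\" plus "proj\app.ts", and only
        				// the latter is sanitized below, so the drive's ":" survives.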
  1876  
  1877  				// For cross-platform robustness, do not allow characters in the output
  1878  				// path that are invalid on Windows. This is especially relevant when
  1879  				// the input path is something other than a file path, such as a URL.
  1880  				outputPath = sanitizeFilePathForVirtualModulePath(outputPath)
  1881  				if windowsVolumeLabel != "" {
  1882  					outputPath = windowsVolumeLabel + outputPath
  1883  				}
  1884  				outputPathWasAutoGenerated = true
  1885  			}
  1886  
  1887  			entryMetas = append(entryMetas, graph.EntryPoint{
  1888  				OutputPath:                 outputPath,
  1889  				SourceIndex:                sourceIndex,
  1890  				OutputPathWasAutoGenerated: outputPathWasAutoGenerated,
  1891  			})
  1892  		}
  1893  	}
  1894  
  1895  	// Turn all automatically-generated output paths into absolute paths
  1896  	for i := range entryMetas {
  1897  		entryPoint := &entryMetas[i]
  1898  		if entryPoint.OutputPathWasAutoGenerated && !s.fs.IsAbs(entryPoint.OutputPath) {
  1899  			entryPoint.OutputPath = s.fs.Join(entryPointAbsResolveDir, entryPoint.OutputPath)
  1900  		}
  1901  	}
  1902  
  1903  	// Automatically compute "outbase" if it wasn't provided
  1904  	if s.options.AbsOutputBase == "" {
  1905  		s.options.AbsOutputBase = lowestCommonAncestorDirectory(s.fs, entryMetas)
  1906  		if s.options.AbsOutputBase == "" {
  1907  			s.options.AbsOutputBase = entryPointAbsResolveDir
  1908  		}
  1909  	}
  1910  
  1911  	// Turn all output paths back into relative paths, but this time relative to
  1912  	// the "outbase" value we computed above
  1913  	for i := range entryMetas {
  1914  		entryPoint := &entryMetas[i]
  1915  		if s.fs.IsAbs(entryPoint.OutputPath) {
  1916  			if !entryPoint.OutputPathWasAutoGenerated {
  1917  				// If an explicit absolute output path was specified, use the path
  1918  				// relative to the "outdir" directory
  1919  				if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, entryPoint.OutputPath); ok {
  1920  					entryPoint.OutputPath = relPath
  1921  				}
  1922  			} else {
  1923  				// Otherwise if the absolute output path was derived from the input
  1924  				// path, use the path relative to the "outbase" directory
  1925  				if relPath, ok := s.fs.Rel(s.options.AbsOutputBase, entryPoint.OutputPath); ok {
  1926  					entryPoint.OutputPath = relPath
  1927  				}
  1928  
  1929  				// Strip the file extension from the output path if there is one so the
  1930  				// "out extension" setting is used instead
  1931  				if last := strings.LastIndexAny(entryPoint.OutputPath, "/.\\"); last != -1 && entryPoint.OutputPath[last] == '.' {
  1932  					entryPoint.OutputPath = entryPoint.OutputPath[:last]
  1933  				}
  1934  			}
  1935  		}
  1936  	}
  1937  
  1938  	return entryMetas
  1939  }
  1940  
  1941  func lowestCommonAncestorDirectory(fs fs.FS, entryPoints []graph.EntryPoint) string {
  1942  	// Ignore any explicitly-specified output paths
  1943  	absPaths := make([]string, 0, len(entryPoints))
  1944  	for _, entryPoint := range entryPoints {
  1945  		if entryPoint.OutputPathWasAutoGenerated {
  1946  			absPaths = append(absPaths, entryPoint.OutputPath)
  1947  		}
  1948  	}
  1949  
  1950  	if len(absPaths) == 0 {
  1951  		return ""
  1952  	}
  1953  
  1954  	lowestAbsDir := fs.Dir(absPaths[0])
  1955  
  1956  	for _, absPath := range absPaths[1:] {
  1957  		absDir := fs.Dir(absPath)
  1958  		lastSlash := 0
  1959  		a := 0
  1960  		b := 0
  1961  
  1962  		for {
  1963  			runeA, widthA := utf8.DecodeRuneInString(absDir[a:])
  1964  			runeB, widthB := utf8.DecodeRuneInString(lowestAbsDir[b:])
  1965  			boundaryA := widthA == 0 || runeA == '/' || runeA == '\\'
  1966  			boundaryB := widthB == 0 || runeB == '/' || runeB == '\\'
  1967  
  1968  			if boundaryA && boundaryB {
  1969  				if widthA == 0 || widthB == 0 {
  1970  					// Truncate to the smaller path if one path is a prefix of the other
  1971  					lowestAbsDir = absDir[:a]
  1972  					break
  1973  				} else {
  1974  					// Track the longest common directory so far
  1975  					lastSlash = a
  1976  				}
  1977  			} else if boundaryA != boundaryB || unicode.ToLower(runeA) != unicode.ToLower(runeB) {
  1978  				// If we're at the top-level directory, then keep the slash
  1979  				if lastSlash < len(absDir) && !strings.ContainsAny(absDir[:lastSlash], "\\/") {
  1980  					lastSlash++
  1981  				}
  1982  
  1983  				// If both paths are different at this point, stop and set the lowest so
  1984  				// far to the common parent directory. Compare using a case-insensitive
  1985  				// comparison to handle paths on Windows.
  1986  				lowestAbsDir = absDir[:lastSlash]
  1987  				break
  1988  			}
  1989  
  1990  			a += widthA
  1991  			b += widthB
  1992  		}
  1993  	}
  1994  
  1995  	return lowestAbsDir
  1996  }
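
        // A worked example of the walk above (hypothetical paths): for the
        // auto-generated outputs "/a/b/c.js" and "/a/b/d/e.js" the directories
        // are "/a/b" and "/a/b/d", one is a prefix of the other on a path
        // boundary, and the result is "/a/b". For "/a/b/c.js" and "/a/bx/d.js"
        // the shared prefix "/a/b" ends mid-component, so the walk falls back
        // to the last shared slash and returns "/a". Comparisons are
        // case-insensitive to cope with Windows paths.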
  1997  
  1998  func (s *scanner) scanAllDependencies() {
  1999  	s.timer.Begin("Scan all dependencies")
  2000  	defer s.timer.End("Scan all dependencies")
  2001  
  2002  	// Continue scanning until all dependencies have been discovered
  2003  	for s.remaining > 0 {
  2004  		if s.options.CancelFlag.DidCancel() {
  2005  			return
  2006  		}
  2007  
  2008  		result := <-s.resultChannel
  2009  		s.remaining--
  2010  		if !result.ok {
  2011  			continue
  2012  		}
  2013  
  2014  		// Don't try to resolve paths if we're not bundling
  2015  		if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil {
  2016  			records := *recordsPtr
  2017  			for importRecordIndex := range records {
  2018  				record := &records[importRecordIndex]
  2019  
  2020  				// This is used for error messages
  2021  				var with *ast.ImportAssertOrWith
  2022  				if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword {
  2023  					with = record.AssertOrWith
  2024  				}
  2025  
  2026  				// Skip this import record if the previous resolver call failed
  2027  				resolveResult := result.resolveResults[importRecordIndex]
  2028  				if resolveResult == nil {
  2029  					if globResults := result.globResolveResults[uint32(importRecordIndex)]; globResults.resolveResults != nil {
  2030  						sourceIndex := s.allocateGlobSourceIndex(result.file.inputFile.Source.Index, uint32(importRecordIndex))
  2031  						record.SourceIndex = ast.MakeIndex32(sourceIndex)
  2032  						s.results[sourceIndex] = s.generateResultForGlobResolve(sourceIndex, globResults.absPath,
  2033  							&result.file.inputFile.Source, record.Range, with, record.GlobPattern.Kind, globResults, record.AssertOrWith)
  2034  					}
  2035  					continue
  2036  				}
  2037  
  2038  				path := resolveResult.PathPair.Primary
  2039  				if !resolveResult.PathPair.IsExternal {
  2040  					// Handle a path within the bundle
  2041  					sourceIndex := s.maybeParseFile(*resolveResult, resolver.PrettyPath(s.fs, path),
  2042  						&result.file.inputFile.Source, record.Range, with, inputKindNormal, nil)
  2043  					record.SourceIndex = ast.MakeIndex32(sourceIndex)
  2044  				} else {
  2045  					// Allow this import statement to be removed if something marked it as "sideEffects: false"
  2046  					if resolveResult.PrimarySideEffectsData != nil {
  2047  						record.Flags |= ast.IsExternalWithoutSideEffects
  2048  					}
  2049  
  2050  					// If the path to the external module is relative to the source
  2051  					// file, rewrite the path to be relative to the output directory
  2052  					if path.Namespace == "file" {
  2053  						if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, path.Text); ok {
  2054  							// Prevent issues with path separators being different on Windows
  2055  							relPath = strings.ReplaceAll(relPath, "\\", "/")
  2056  							if resolver.IsPackagePath(relPath) {
  2057  								relPath = "./" + relPath
  2058  							}
  2059  							record.Path.Text = relPath
  2060  						} else {
  2061  							record.Path = path
  2062  						}
  2063  					} else {
  2064  						record.Path = path
  2065  					}
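
        					// For illustration (hypothetical paths): with an output directory
        					// of "/proj/out", an external file "/proj/src/util.js" becomes the
        					// relative specifier "../src/util.js". A result that happened to
        					// look like a package name would get a "./" prefix instead so it
        					// isn't resolved from "node_modules" at run time.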
  2066  				}
  2067  			}
  2068  		}
  2069  
  2070  		s.results[result.file.inputFile.Source.Index] = result
  2071  	}
  2072  }
  2073  
  2074  func (s *scanner) generateResultForGlobResolve(
  2075  	sourceIndex uint32,
  2076  	fakeSourcePath string,
  2077  	importSource *logger.Source,
  2078  	importRange logger.Range,
  2079  	importWith *ast.ImportAssertOrWith,
  2080  	kind ast.ImportKind,
  2081  	result globResolveResult,
  2082  	assertions *ast.ImportAssertOrWith,
  2083  ) parseResult {
  2084  	keys := make([]string, 0, len(result.resolveResults))
  2085  	for key := range result.resolveResults {
  2086  		keys = append(keys, key)
  2087  	}
  2088  	sort.Strings(keys)
  2089  
  2090  	object := js_ast.EObject{Properties: make([]js_ast.Property, 0, len(result.resolveResults))}
  2091  	importRecords := make([]ast.ImportRecord, 0, len(result.resolveResults))
  2092  	resolveResults := make([]*resolver.ResolveResult, 0, len(result.resolveResults))
  2093  
  2094  	for _, key := range keys {
  2095  		resolveResult := result.resolveResults[key]
  2096  		var value js_ast.Expr
  2097  
  2098  		importRecordIndex := uint32(len(importRecords))
  2099  		var sourceIndex ast.Index32
  2100  
  2101  		if !resolveResult.PathPair.IsExternal {
  2102  			sourceIndex = ast.MakeIndex32(s.maybeParseFile(
  2103  				resolveResult,
  2104  				resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary),
  2105  				importSource,
  2106  				importRange,
  2107  				importWith,
  2108  				inputKindNormal,
  2109  				nil,
  2110  			))
  2111  		}
  2112  
  2113  		path := resolveResult.PathPair.Primary
  2114  
  2115  		// If the path to the external module is relative to the source
  2116  		// file, rewrite the path to be relative to the output directory
  2117  		if path.Namespace == "file" {
  2118  			if relPath, ok := s.fs.Rel(s.options.AbsOutputDir, path.Text); ok {
  2119  				// Prevent issues with path separators being different on Windows
  2120  				relPath = strings.ReplaceAll(relPath, "\\", "/")
  2121  				if resolver.IsPackagePath(relPath) {
  2122  					relPath = "./" + relPath
  2123  				}
  2124  				path.Text = relPath
  2125  			}
  2126  		}
  2127  
  2128  		resolveResults = append(resolveResults, &resolveResult)
  2129  		importRecords = append(importRecords, ast.ImportRecord{
  2130  			Path:         path,
  2131  			SourceIndex:  sourceIndex,
  2132  			AssertOrWith: assertions,
  2133  			Kind:         kind,
  2134  		})
  2135  
  2136  		switch kind {
  2137  		case ast.ImportDynamic:
  2138  			value.Data = &js_ast.EImportString{ImportRecordIndex: importRecordIndex}
  2139  		case ast.ImportRequire:
  2140  			value.Data = &js_ast.ERequireString{ImportRecordIndex: importRecordIndex}
  2141  		default:
  2142  			panic("Internal error")
  2143  		}
  2144  
  2145  		object.Properties = append(object.Properties, js_ast.Property{
  2146  			Key: js_ast.Expr{Data: &js_ast.EString{Value: helpers.StringToUTF16(key)}},
  2147  			ValueOrNil: js_ast.Expr{Data: &js_ast.EArrow{
  2148  				Body:       js_ast.FnBody{Block: js_ast.SBlock{Stmts: []js_ast.Stmt{{Data: &js_ast.SReturn{ValueOrNil: value}}}}},
  2149  				PreferExpr: true,
  2150  			}},
  2151  		})
  2152  	}
  2153  
  2154  	source := logger.Source{
  2155  		KeyPath:    logger.Path{Text: fakeSourcePath, Namespace: "file"},
  2156  		PrettyPath: result.prettyPath,
  2157  		Index:      sourceIndex,
  2158  	}
  2159  	ast := js_parser.GlobResolveAST(s.log, source, importRecords, &object, result.exportAlias)
  2160  
  2161  	// Fill out "nil" for any additional imports (i.e. from the runtime)
  2162  	for len(resolveResults) < len(ast.ImportRecords) {
  2163  		resolveResults = append(resolveResults, nil)
  2164  	}
  2165  
  2166  	return parseResult{
  2167  		resolveResults: resolveResults,
  2168  		file: scannerFile{
  2169  			inputFile: graph.InputFile{
  2170  				Source: source,
  2171  				Repr: &graph.JSRepr{
  2172  					AST: ast,
  2173  				},
  2174  				OmitFromSourceMapsAndMetafile: true,
  2175  			},
  2176  		},
  2177  		ok: true,
  2178  	}
  2179  }
  2180  
  2181  func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scannerFile {
  2182  	s.timer.Begin("Process scanned files")
  2183  	defer s.timer.End("Process scanned files")
  2184  
  2185  	// Build a set of entry point source indices for quick lookup
  2186  	entryPointSourceIndexToMetaIndex := make(map[uint32]uint32, len(entryPointMeta))
  2187  	for i, meta := range entryPointMeta {
  2188  		entryPointSourceIndexToMetaIndex[meta.SourceIndex] = uint32(i)
  2189  	}
  2190  
  2191  	// Check for pretty-printed path collisions
  2192  	importAttributeNameCollisions := make(map[string][]uint32)
  2193  	for sourceIndex := range s.results {
  2194  		if result := &s.results[sourceIndex]; result.ok {
  2195  			prettyPath := result.file.inputFile.Source.PrettyPath
  2196  			importAttributeNameCollisions[prettyPath] = append(importAttributeNameCollisions[prettyPath], uint32(sourceIndex))
  2197  		}
  2198  	}
  2199  
  2200  	// Import attributes can result in the same file being imported multiple
  2201  	// times in different ways. If that happens, append the import attributes
  2202  	// to the pretty-printed file names to disambiguate them. This renaming
  2203  	// must happen before we construct the metafile JSON chunks below.
  2204  	for _, sourceIndices := range importAttributeNameCollisions {
  2205  		if len(sourceIndices) == 1 {
  2206  			continue
  2207  		}
  2208  
  2209  		for _, sourceIndex := range sourceIndices {
  2210  			source := &s.results[sourceIndex].file.inputFile.Source
  2211  			attrs := source.KeyPath.ImportAttributes.DecodeIntoArray()
  2212  			if len(attrs) == 0 {
  2213  				continue
  2214  			}
  2215  
  2216  			var sb strings.Builder
  2217  			sb.WriteString(" with {")
  2218  			for i, attr := range attrs {
  2219  				if i > 0 {
  2220  					sb.WriteByte(',')
  2221  				}
  2222  				sb.WriteByte(' ')
  2223  				if js_ast.IsIdentifier(attr.Key) {
  2224  					sb.WriteString(attr.Key)
  2225  				} else {
  2226  					sb.Write(helpers.QuoteSingle(attr.Key, false))
  2227  				}
  2228  				sb.WriteString(": ")
  2229  				sb.Write(helpers.QuoteSingle(attr.Value, false))
  2230  			}
  2231  			sb.WriteString(" }")
  2232  			source.PrettyPath += sb.String()
  2233  		}
  2234  	}
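
        	// For illustration (a hypothetical collision): if "data.bin" is imported
        	// both plainly and with an attribute, the attributed copy's pretty path
        	// is rewritten to something like
        	//
        	//   data.bin with { type: 'binary' }
        	//
        	// so the two stay distinguishable in errors and in the metafile.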
  2235  
  2236  	// Now that all files have been scanned, process the final file import records
  2237  	for sourceIndex, result := range s.results {
  2238  		if !result.ok {
  2239  			continue
  2240  		}
  2241  
  2242  		sb := strings.Builder{}
  2243  		isFirstImport := true
  2244  
  2245  		// Begin the metadata chunk
  2246  		if s.options.NeedsMetafile {
  2247  			sb.Write(helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly))
  2248  			sb.WriteString(fmt.Sprintf(": {\n      \"bytes\": %d,\n      \"imports\": [", len(result.file.inputFile.Source.Contents)))
  2249  		}
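
        		// For illustration, the partial chunk built here looks like this
        		// (hypothetical values, indentation compressed), and is stitched into
        		// the complete metafile JSON after linking:
        		//
        		//   "src/app.ts": {
        		//     "bytes": 1234,
        		//     "imports": [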
  2250  
  2251  		// Don't try to resolve paths if we're not bundling
  2252  		if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil {
  2253  			records := *recordsPtr
  2254  			tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source)
  2255  
  2256  			for importRecordIndex := range records {
  2257  				record := &records[importRecordIndex]
  2258  
  2259  				// Save the import attributes to the metafile
  2260  				var metafileWith string
  2261  				if s.options.NeedsMetafile {
  2262  					if with := record.AssertOrWith; with != nil && with.Keyword == ast.WithKeyword && len(with.Entries) > 0 {
  2263  						data := strings.Builder{}
  2264  						data.WriteString(",\n          \"with\": {")
  2265  						for i, entry := range with.Entries {
  2266  							if i > 0 {
  2267  								data.WriteByte(',')
  2268  							}
  2269  							data.WriteString("\n            ")
  2270  							data.Write(helpers.QuoteForJSON(helpers.UTF16ToString(entry.Key), s.options.ASCIIOnly))
  2271  							data.WriteString(": ")
  2272  							data.Write(helpers.QuoteForJSON(helpers.UTF16ToString(entry.Value), s.options.ASCIIOnly))
  2273  						}
  2274  						data.WriteString("\n          }")
  2275  						metafileWith = data.String()
  2276  					}
  2277  				}
  2278  
  2279  				// Skip this import record if the previous resolver call failed
  2280  				resolveResult := result.resolveResults[importRecordIndex]
  2281  				if resolveResult == nil || !record.SourceIndex.IsValid() {
  2282  					if s.options.NeedsMetafile {
  2283  						if isFirstImport {
  2284  							isFirstImport = false
  2285  							sb.WriteString("\n        ")
  2286  						} else {
  2287  							sb.WriteString(",\n        ")
  2288  						}
  2289  						sb.WriteString(fmt.Sprintf("{\n          \"path\": %s,\n          \"kind\": %s,\n          \"external\": true%s\n        }",
  2290  							helpers.QuoteForJSON(record.Path.Text, s.options.ASCIIOnly),
  2291  							helpers.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly),
  2292  							metafileWith))
  2293  					}
  2294  					continue
  2295  				}
  2296  
  2297  				// Now that all files have been scanned, look for packages that are imported
  2298  				// both with "import" and "require". Rewrite any imports that reference the
  2299  				// "module" package.json field to the "main" package.json field instead.
  2300  				//
  2301  				// This attempts to automatically avoid the "dual package hazard" where a
  2302  				// package has both a CommonJS module version and an ECMAScript module
  2303  				// version and exports a non-object in CommonJS (often a function). If we
  2304  				// pick the "module" field and the package is imported with "require" then
  2305  				// code expecting a function will crash.
  2306  				if resolveResult.PathPair.HasSecondary() {
  2307  					secondaryKey := resolveResult.PathPair.Secondary
  2308  					if secondaryKey.Namespace == "file" {
  2309  						secondaryKey.Text = canonicalFileSystemPathForWindows(secondaryKey.Text)
  2310  					}
  2311  					if secondaryVisited, ok := s.visited[secondaryKey]; ok {
  2312  						record.SourceIndex = ast.MakeIndex32(secondaryVisited.sourceIndex)
  2313  					}
  2314  				}
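
        				// For illustration (a hypothetical package): say "pkg" declares
        				// both "module": "esm/index.js" and "main": "cjs/index.js". An
        				// import of "pkg" resolves to the ESM file with the CJS file as
        				// the secondary path; if the CJS file was already visited via
        				// require("pkg"), the record is redirected to it above so only
        				// one copy of the package is bundled.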
  2315  
  2316  				// Generate metadata about each import
  2317  				otherResult := &s.results[record.SourceIndex.GetIndex()]
  2318  				otherFile := &otherResult.file
  2319  				if s.options.NeedsMetafile {
  2320  					if isFirstImport {
  2321  						isFirstImport = false
  2322  						sb.WriteString("\n        ")
  2323  					} else {
  2324  						sb.WriteString(",\n        ")
  2325  					}
  2326  					sb.WriteString(fmt.Sprintf("{\n          \"path\": %s,\n          \"kind\": %s,\n          \"original\": %s%s\n        }",
  2327  						helpers.QuoteForJSON(otherFile.inputFile.Source.PrettyPath, s.options.ASCIIOnly),
  2328  						helpers.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly),
  2329  						helpers.QuoteForJSON(record.Path.Text, s.options.ASCIIOnly),
  2330  						metafileWith))
  2331  				}
  2332  
  2333  				// Validate that imports with "assert { type: 'json' }" were imported
  2334  				// with the JSON loader. This is done to match the behavior of these
  2335  				// import assertions in a real JavaScript runtime. In addition, we also
  2336  				// allow the copy loader since this is sort of like marking the path
  2337  				// as external (the import assertions are kept and the real JavaScript
  2338  				// runtime evaluates them, not us).
  2339  				if record.Flags.Has(ast.AssertTypeJSON) && otherResult.ok && otherFile.inputFile.Loader != config.LoaderJSON && otherFile.inputFile.Loader != config.LoaderCopy {
  2340  					s.log.AddErrorWithNotes(&tracker, record.Range,
  2341  						fmt.Sprintf("The file %q was loaded with the %q loader", otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader]),
  2342  						[]logger.MsgData{
  2343  							tracker.MsgData(js_lexer.RangeOfImportAssertOrWith(result.file.inputFile.Source,
  2344  								*ast.FindAssertOrWithEntry(record.AssertOrWith.Entries, "type"), js_lexer.KeyAndValueRange),
  2345  								"This import assertion requires the loader to be \"json\" instead:"),
  2346  							{Text: "You need to either reconfigure esbuild to ensure that the loader for this file is \"json\" or you need to remove this import assertion."}})
  2347  				}
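
        				// For illustration (a hypothetical import): a statement such as
        				//
        				//   import data from "./data.json" assert { type: "json" }
        				//
        				// must land on the "json" (or "copy") loader; if "./data.json"
        				// were mapped to the "text" loader instead, the error above fires
        				// rather than silently diverging from real runtime behavior.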
  2348  
  2349  				switch record.Kind {
  2350  				case ast.ImportComposesFrom:
  2351  					// Using a JavaScript file with CSS "composes" is not allowed
  2352  					if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok && otherFile.inputFile.Loader != config.LoaderEmpty {
  2353  						s.log.AddErrorWithNotes(&tracker, record.Range,
  2354  							fmt.Sprintf("Cannot use \"composes\" with %q", otherFile.inputFile.Source.PrettyPath),
  2355  							[]logger.MsgData{{Text: fmt.Sprintf(
  2356  								"You can only use \"composes\" with CSS files and %q is not a CSS file (it was loaded with the %q loader).",
  2357  								otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}})
  2358  					}
  2359  
  2360  				case ast.ImportAt:
  2361  					// Using a JavaScript file with CSS "@import" is not allowed
  2362  					if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok && otherFile.inputFile.Loader != config.LoaderEmpty {
  2363  						s.log.AddErrorWithNotes(&tracker, record.Range,
  2364  							fmt.Sprintf("Cannot import %q into a CSS file", otherFile.inputFile.Source.PrettyPath),
  2365  							[]logger.MsgData{{Text: fmt.Sprintf(
  2366  								"An \"@import\" rule can only be used to import another CSS file and %q is not a CSS file (it was loaded with the %q loader).",
  2367  								otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}})
  2368  					}
  2369  
  2370  				case ast.ImportURL:
  2371  					// Using a JavaScript or CSS file with CSS "url()" is not allowed
  2372  					switch otherRepr := otherFile.inputFile.Repr.(type) {
  2373  					case *graph.CSSRepr:
  2374  						s.log.AddErrorWithNotes(&tracker, record.Range,
  2375  							fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath),
  2376  							[]logger.MsgData{{Text: fmt.Sprintf(
  2377  								"You can't use a \"url()\" token to reference a CSS file, and %q is a CSS file (it was loaded with the %q loader).",
  2378  								otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}})
  2379  
  2380  					case *graph.JSRepr:
  2381  						if otherRepr.AST.URLForCSS == "" && otherFile.inputFile.Loader != config.LoaderEmpty {
  2382  							s.log.AddErrorWithNotes(&tracker, record.Range,
  2383  								fmt.Sprintf("Cannot use %q as a URL", otherFile.inputFile.Source.PrettyPath),
  2384  								[]logger.MsgData{{Text: fmt.Sprintf(
  2385  									"You can't use a \"url()\" token to reference the file %q because it was loaded with the %q loader, which doesn't provide a URL to embed in the resulting CSS.",
  2386  									otherFile.inputFile.Source.PrettyPath, config.LoaderToString[otherFile.inputFile.Loader])}})
  2387  						}
  2388  					}
  2389  				}
  2390  
  2391  				// If the imported file uses the "copy" loader, then move it from
  2392  				// "SourceIndex" to "CopySourceIndex" so we don't end up bundling it.
  2393  				if _, ok := otherFile.inputFile.Repr.(*graph.CopyRepr); ok {
  2394  					record.CopySourceIndex = record.SourceIndex
  2395  					record.SourceIndex = ast.Index32{}
  2396  					continue
  2397  				}
  2398  
  2399  				// If an import from a JavaScript file targets a CSS file, generate a
  2400  				// JavaScript stub to ensure that JavaScript files only ever import
  2401  				// other JavaScript files.
  2402  				if _, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok {
  2403  					if css, ok := otherFile.inputFile.Repr.(*graph.CSSRepr); ok {
  2404  						if s.options.WriteToStdout {
  2405  							s.log.AddError(&tracker, record.Range,
  2406  								fmt.Sprintf("Cannot import %q into a JavaScript file without an output path configured", otherFile.inputFile.Source.PrettyPath))
  2407  						} else if !css.JSSourceIndex.IsValid() {
  2408  							stubKey := otherFile.inputFile.Source.KeyPath
  2409  							if stubKey.Namespace == "file" {
  2410  								stubKey.Text = canonicalFileSystemPathForWindows(stubKey.Text)
  2411  							}
  2412  							sourceIndex := s.allocateSourceIndex(stubKey, cache.SourceIndexJSStubForCSS)
  2413  							source := otherFile.inputFile.Source
  2414  							source.Index = sourceIndex
  2415  							s.results[sourceIndex] = parseResult{
  2416  								file: scannerFile{
  2417  									inputFile: graph.InputFile{
  2418  										Source: source,
  2419  										Loader: otherFile.inputFile.Loader,
  2420  										Repr: &graph.JSRepr{
  2421  											// Note: The actual export object will be filled in by the linker
  2422  											AST: js_parser.LazyExportAST(s.log, source,
  2423  												js_parser.OptionsFromConfig(&s.options), js_ast.Expr{Data: js_ast.ENullShared}, ""),
  2424  											CSSSourceIndex: ast.MakeIndex32(record.SourceIndex.GetIndex()),
  2425  										},
  2426  									},
  2427  								},
  2428  								ok: true,
  2429  							}
  2430  							css.JSSourceIndex = ast.MakeIndex32(sourceIndex)
  2431  						}
  2432  						record.SourceIndex = css.JSSourceIndex
  2433  						if !css.JSSourceIndex.IsValid() {
  2434  							continue
  2435  						}
  2436  					}
  2437  				}
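
        				// For illustration (hypothetical files): given "app.js" containing
        				// `import "./styles.css"`, a lazily-exported JS stub is generated
        				// for "styles.css" so the JS graph only links JS to JS, while the
        				// stub's CSSSourceIndex still points back at the real CSS file.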
  2438  
  2439  				// Warn about this import if it's a bare import statement without any
  2440  				// imported names (i.e. a side-effect-only import) and the module has
  2441  				// been marked as having no side effects.
  2442  				//
  2443  				// Except don't do this if this file is inside "node_modules" since
  2444  				// it's a bug in the package and the user won't be able to do anything
  2445  				// about it. Note that this can result in esbuild silently generating
  2446  				// broken code. If this actually happens for people, it's probably worth
  2447  				// re-enabling the warning about code inside "node_modules".
  2448  				if record.Flags.Has(ast.WasOriginallyBareImport) && !s.options.IgnoreDCEAnnotations &&
  2449  					!helpers.IsInsideNodeModules(result.file.inputFile.Source.KeyPath.Text) {
  2450  					if otherModule := &s.results[record.SourceIndex.GetIndex()].file.inputFile; otherModule.SideEffects.Kind != graph.HasSideEffects &&
  2451  						// Do not warn if this is from a plugin, since removing the import
  2452  						// would cause the plugin to not run, and running a plugin is a side
  2453  						// effect.
  2454  						otherModule.SideEffects.Kind != graph.NoSideEffects_PureData_FromPlugin &&
  2455  
  2456  						// Do not warn if this has no side effects because the parsed AST
  2457  						// is empty. This is the case for ".d.ts" files, for example.
  2458  						otherModule.SideEffects.Kind != graph.NoSideEffects_EmptyAST {
  2459  
  2460  						var notes []logger.MsgData
  2461  						var by string
  2462  						if data := otherModule.SideEffects.Data; data != nil {
  2463  							if data.PluginName != "" {
  2464  								by = fmt.Sprintf(" by plugin %q", data.PluginName)
  2465  							} else {
  2466  								var text string
  2467  								if data.IsSideEffectsArrayInJSON {
  2468  									text = "It was excluded from the \"sideEffects\" array in the enclosing \"package.json\" file:"
  2469  								} else {
  2470  									text = "\"sideEffects\" is false in the enclosing \"package.json\" file:"
  2471  								}
  2472  								tracker := logger.MakeLineColumnTracker(data.Source)
  2473  								notes = append(notes, tracker.MsgData(data.Range, text))
  2474  							}
  2475  						}
  2476  						s.log.AddIDWithNotes(logger.MsgID_Bundler_IgnoredBareImport, logger.Warning, &tracker, record.Range,
  2477  							fmt.Sprintf("Ignoring this import because %q was marked as having no side effects%s",
  2478  								otherModule.Source.PrettyPath, by), notes)
  2479  					}
  2480  				}
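
        				// For illustration (a hypothetical package): `import "analytics-pkg"`
        				// imports no names, so if "analytics-pkg" declares "sideEffects":
        				// false in its "package.json", the entire import is removed and the
        				// warning above is the only trace of it.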
  2481  			}
  2482  		}
  2483  
  2484  		// End the metadata chunk
  2485  		if s.options.NeedsMetafile {
  2486  			if !isFirstImport {
  2487  				sb.WriteString("\n      ")
  2488  			}
  2489  			if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok &&
  2490  				(repr.AST.ExportsKind == js_ast.ExportsCommonJS || repr.AST.ExportsKind == js_ast.ExportsESM) {
  2491  				format := "cjs"
  2492  				if repr.AST.ExportsKind == js_ast.ExportsESM {
  2493  					format = "esm"
  2494  				}
  2495  				sb.WriteString(fmt.Sprintf("],\n      \"format\": %q", format))
  2496  			} else {
  2497  				sb.WriteString("]")
  2498  			}
  2499  			if attrs := result.file.inputFile.Source.KeyPath.ImportAttributes.DecodeIntoArray(); len(attrs) > 0 {
  2500  				sb.WriteString(",\n      \"with\": {")
  2501  				for i, attr := range attrs {
  2502  					if i > 0 {
  2503  						sb.WriteByte(',')
  2504  					}
  2505  					sb.WriteString(fmt.Sprintf("\n        %s: %s",
  2506  						helpers.QuoteForJSON(attr.Key, s.options.ASCIIOnly),
  2507  						helpers.QuoteForJSON(attr.Value, s.options.ASCIIOnly),
  2508  					))
  2509  				}
  2510  				sb.WriteString("\n      }")
  2511  			}
  2512  			sb.WriteString("\n    }")
  2513  		}
  2514  
  2515  		result.file.jsonMetadataChunk = sb.String()
  2516  
  2517  		// If this file is from the "file" or "copy" loaders, generate an additional file
  2518  		if result.file.inputFile.UniqueKeyForAdditionalFile != "" {
  2519  			bytes := []byte(result.file.inputFile.Source.Contents)
  2520  			template := s.options.AssetPathTemplate
  2521  
  2522  			// Use the entry path template instead of the asset path template if this
  2523  			// file is an entry point and uses the "copy" loader. With the "file" loader
  2524  			// the JS stub is the entry point, but with the "copy" loader the file is
  2525  			// the entry point itself.
  2526  			customFilePath := ""
  2527  			useOutputFile := false
  2528  			if result.file.inputFile.Loader == config.LoaderCopy {
  2529  				if metaIndex, ok := entryPointSourceIndexToMetaIndex[uint32(sourceIndex)]; ok {
  2530  					template = s.options.EntryPathTemplate
  2531  					customFilePath = entryPointMeta[metaIndex].OutputPath
  2532  					useOutputFile = s.options.AbsOutputFile != ""
  2533  				}
  2534  			}
  2535  
  2536  			// Add a hash to the file name to prevent multiple files with the same name
  2537  			// but different contents from colliding
  2538  			var hash string
  2539  			if config.HasPlaceholder(template, config.HashPlaceholder) {
  2540  				h := xxhash.New()
  2541  				h.Write(bytes)
  2542  				hash = HashForFileName(h.Sum(nil))
  2543  			}
  2544  
  2545  			// This should use similar logic to how the linker computes output paths
  2546  			var dir, base, ext string
  2547  			if useOutputFile {
  2548  				// If the output path was configured explicitly, use it verbatim
  2549  				dir = "/"
  2550  				base = s.fs.Base(s.options.AbsOutputFile)
  2551  				ext = s.fs.Ext(base)
  2552  				base = base[:len(base)-len(ext)]
  2553  			} else {
  2554  				// Otherwise, derive the output path from the input path
  2555  				// Generate the input for the template
  2556  				_, _, originalExt := logger.PlatformIndependentPathDirBaseExt(result.file.inputFile.Source.KeyPath.Text)
  2557  				dir, base = PathRelativeToOutbase(
  2558  					&result.file.inputFile,
  2559  					&s.options,
  2560  					s.fs,
  2561  					/* avoidIndex */ false,
  2562  					customFilePath,
  2563  				)
  2564  				ext = originalExt
  2565  			}
  2566  
  2567  			// Apply the path template
  2568  			templateExt := strings.TrimPrefix(ext, ".")
  2569  			relPath := config.TemplateToString(config.SubstituteTemplate(template, config.PathPlaceholders{
  2570  				Dir:  &dir,
  2571  				Name: &base,
  2572  				Hash: &hash,
  2573  				Ext:  &templateExt,
  2574  			})) + ext
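
        			// For illustration (hypothetical values): with a template such as
        			// "[name]-[hash]", an input "images/logo.png" whose contents hash to
        			// "ABCD1234" becomes "logo-ABCD1234.png"; "[dir]" and "[ext]" would
        			// substitute the outbase-relative directory and the extension.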
  2575  
  2576  			// Optionally add metadata about the file
  2577  			var jsonMetadataChunk string
  2578  			if s.options.NeedsMetafile {
  2579  				inputs := fmt.Sprintf("{\n        %s: {\n          \"bytesInOutput\": %d\n        }\n      }",
  2580  					helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly),
  2581  					len(bytes),
  2582  				)
  2583  				jsonMetadataChunk = fmt.Sprintf(
  2584  					"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": %s,\n      \"bytes\": %d\n    }",
  2585  					inputs,
  2586  					len(bytes),
  2587  				)
  2588  			}
  2589  
  2590  			// Generate the additional file to copy into the output directory
  2591  			result.file.inputFile.AdditionalFiles = []graph.OutputFile{{
  2592  				AbsPath:           s.fs.Join(s.options.AbsOutputDir, relPath),
  2593  				Contents:          bytes,
  2594  				JSONMetadataChunk: jsonMetadataChunk,
  2595  			}}
  2596  		}
  2597  
  2598  		s.results[sourceIndex] = result
  2599  	}
  2600  
  2601  	// The linker operates on an array of files, so construct that now. This
  2602  	// can't be constructed earlier because we generate new parse results for
  2603  	// JavaScript stub files for CSS imports above.
  2604  	files := make([]scannerFile, len(s.results))
  2605  	for sourceIndex := range s.results {
  2606  		if result := &s.results[sourceIndex]; result.ok {
  2607  			s.validateTLA(uint32(sourceIndex))
  2608  			files[sourceIndex] = result.file
  2609  		}
  2610  	}
  2611  
  2612  	return files
  2613  }
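
// The template substitution above can be exercised on its own. A minimal
// sketch (not part of esbuild; the function name is hypothetical) that
// renders the default asset template "[name]-[hash]" for a given base name,
// hash, and extension, mirroring the calls made in the copy-loader branch:
func examplePathForCopyLoader(base string, hash string, ext string) string {
	dir := "/" // unused by this template, which has no "[dir]" placeholder
	templateExt := strings.TrimPrefix(ext, ".")
	template := []config.PathTemplate{
		{Data: "./", Placeholder: config.NamePlaceholder},
		{Data: "-", Placeholder: config.HashPlaceholder},
	}
	return config.TemplateToString(config.SubstituteTemplate(template, config.PathPlaceholders{
		Dir:  &dir,
		Name: &base,
		Hash: &hash,
		Ext:  &templateExt,
	})) + ext
}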
  2614  
  2615  func (s *scanner) validateTLA(sourceIndex uint32) tlaCheck {
  2616  	result := &s.results[sourceIndex]
  2617  
  2618  	if result.ok && result.tlaCheck.depth == 0 {
  2619  		if repr, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok {
  2620  			result.tlaCheck.depth = 1
  2621  			if repr.AST.LiveTopLevelAwaitKeyword.Len > 0 {
  2622  				result.tlaCheck.parent = ast.MakeIndex32(sourceIndex)
  2623  			}
  2624  
  2625  			for importRecordIndex, record := range repr.AST.ImportRecords {
  2626  				if record.SourceIndex.IsValid() && (record.Kind == ast.ImportRequire || record.Kind == ast.ImportStmt) {
  2627  					parent := s.validateTLA(record.SourceIndex.GetIndex())
  2628  					if !parent.parent.IsValid() {
  2629  						continue
  2630  					}
  2631  
  2632  					// Follow any import chains
  2633  					if record.Kind == ast.ImportStmt && (!result.tlaCheck.parent.IsValid() || parent.depth < result.tlaCheck.depth) {
  2634  						result.tlaCheck.depth = parent.depth + 1
  2635  						result.tlaCheck.parent = record.SourceIndex
  2636  						result.tlaCheck.importRecordIndex = uint32(importRecordIndex)
  2637  						continue
  2638  					}
  2639  
  2640  					// Require of a top-level await chain is forbidden
  2641  					if record.Kind == ast.ImportRequire {
  2642  						var notes []logger.MsgData
  2643  						var tlaPrettyPath string
  2644  						otherSourceIndex := record.SourceIndex.GetIndex()
  2645  
  2646  						// Build up a chain of relevant notes for all of the imports
  2647  						for {
  2648  							parentResult := &s.results[otherSourceIndex]
  2649  							parentRepr := parentResult.file.inputFile.Repr.(*graph.JSRepr)
  2650  
  2651  							if parentRepr.AST.LiveTopLevelAwaitKeyword.Len > 0 {
  2652  								tlaPrettyPath = parentResult.file.inputFile.Source.PrettyPath
  2653  								tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source)
  2654  								notes = append(notes, tracker.MsgData(parentRepr.AST.LiveTopLevelAwaitKeyword,
  2655  									fmt.Sprintf("The top-level await in %q is here:", tlaPrettyPath)))
  2656  								break
  2657  							}
  2658  
  2659  							if !parentResult.tlaCheck.parent.IsValid() {
  2660  								notes = append(notes, logger.MsgData{Text: "unexpected invalid index"})
  2661  								break
  2662  							}
  2663  
  2664  							otherSourceIndex = parentResult.tlaCheck.parent.GetIndex()
  2665  
  2666  							tracker := logger.MakeLineColumnTracker(&parentResult.file.inputFile.Source)
  2667  							notes = append(notes, tracker.MsgData(
  2668  								parentRepr.AST.ImportRecords[parentResult.tlaCheck.importRecordIndex].Range,
  2669  								fmt.Sprintf("The file %q imports the file %q here:",
  2670  									parentResult.file.inputFile.Source.PrettyPath, s.results[otherSourceIndex].file.inputFile.Source.PrettyPath)))
  2671  						}
  2672  
  2673  						var text string
  2674  						importedPrettyPath := s.results[record.SourceIndex.GetIndex()].file.inputFile.Source.PrettyPath
  2675  
  2676  						if importedPrettyPath == tlaPrettyPath {
  2677  							text = fmt.Sprintf("This require call is not allowed because the imported file %q contains a top-level await",
  2678  								importedPrettyPath)
  2679  						} else {
  2680  							text = fmt.Sprintf("This require call is not allowed because the transitive dependency %q contains a top-level await",
  2681  								tlaPrettyPath)
  2682  						}
  2683  
  2684  						tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source)
  2685  						s.log.AddErrorWithNotes(&tracker, record.Range, text, notes)
  2686  					}
  2687  				}
  2688  			}
  2689  
  2690  			// Make sure that if we wrap this module in a closure, the closure is also
  2691  			// async. This happens when you call "import()" on this module and code
  2692  			// splitting is off.
  2693  			if result.tlaCheck.parent.IsValid() {
  2694  				repr.Meta.IsAsyncOrHasAsyncDependency = true
  2695  			}
  2696  		}
  2697  	}
  2698  
  2699  	return result.tlaCheck
  2700  }
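
// To make the rule enforced above concrete, here is an illustrative module
// chain (the file names are hypothetical, shown as JavaScript in comments):
//
//	// a.js
//	const b = require('./b.js') // error: transitive dependency "c.js" contains a top-level await
//
//	// b.js
//	import './c.js'
//
//	// c.js
//	await fetch('/data')
//
// The "import" in "b.js" is fine and only propagates the async flag up the
// chain; the "require" in "a.js" is rejected because "require" must return
// synchronously, which is impossible when evaluation needs to be awaited.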
  2701  
  2702  func DefaultExtensionToLoaderMap() map[string]config.Loader {
  2703  	return map[string]config.Loader{
  2704  		"":            config.LoaderJS, // This represents files without an extension
  2705  		".js":         config.LoaderJS,
  2706  		".mjs":        config.LoaderJS,
  2707  		".cjs":        config.LoaderJS,
  2708  		".jsx":        config.LoaderJSX,
  2709  		".ts":         config.LoaderTS,
  2710  		".cts":        config.LoaderTSNoAmbiguousLessThan,
  2711  		".mts":        config.LoaderTSNoAmbiguousLessThan,
  2712  		".tsx":        config.LoaderTSX,
  2713  		".css":        config.LoaderCSS,
  2714  		".module.css": config.LoaderLocalCSS,
  2715  		".json":       config.LoaderJSON,
  2716  		".txt":        config.LoaderText,
  2717  	}
  2718  }
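
// A minimal sketch of customizing the defaults (not part of esbuild): callers
// can copy this map and override individual entries before assigning it to
// "options.ExtensionToLoader". The ".svg" override is a hypothetical example.
func exampleCustomLoaderMap() map[string]config.Loader {
	m := DefaultExtensionToLoaderMap()
	m[".svg"] = config.LoaderFile // assumption: emit SVGs as separate files
	return m
}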
  2719  
  2720  func applyOptionDefaults(options *config.Options) {
  2721  	if options.ExtensionToLoader == nil {
  2722  		options.ExtensionToLoader = DefaultExtensionToLoaderMap()
  2723  	}
  2724  	if options.OutputExtensionJS == "" {
  2725  		options.OutputExtensionJS = ".js"
  2726  	}
  2727  	if options.OutputExtensionCSS == "" {
  2728  		options.OutputExtensionCSS = ".css"
  2729  	}
  2730  
  2731  	// Configure default path templates
  2732  	if len(options.EntryPathTemplate) == 0 {
  2733  		options.EntryPathTemplate = []config.PathTemplate{
  2734  			{Data: "./", Placeholder: config.DirPlaceholder},
  2735  			{Data: "/", Placeholder: config.NamePlaceholder},
  2736  		}
  2737  	}
  2738  	if len(options.ChunkPathTemplate) == 0 {
  2739  		options.ChunkPathTemplate = []config.PathTemplate{
  2740  			{Data: "./", Placeholder: config.NamePlaceholder},
  2741  			{Data: "-", Placeholder: config.HashPlaceholder},
  2742  		}
  2743  	}
  2744  	if len(options.AssetPathTemplate) == 0 {
  2745  		options.AssetPathTemplate = []config.PathTemplate{
  2746  			{Data: "./", Placeholder: config.NamePlaceholder},
  2747  			{Data: "-", Placeholder: config.HashPlaceholder},
  2748  		}
  2749  	}
  2750  
  2751  	options.ProfilerNames = !options.MinifyIdentifiers // names for profilers are only kept when identifiers aren't minified
  2752  
  2753  	// Automatically fix invalid configurations of unsupported features
  2754  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.AsyncAwait, compat.AsyncGenerator|compat.ForAwait|compat.TopLevelAwait)
  2755  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.Generator, compat.AsyncGenerator)
  2756  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.ObjectAccessors, compat.ClassPrivateAccessor|compat.ClassPrivateStaticAccessor)
  2757  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.ClassField, compat.ClassPrivateField)
  2758  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.ClassStaticField, compat.ClassPrivateStaticField)
  2759  	fixInvalidUnsupportedJSFeatureOverrides(options, compat.Class,
  2760  		compat.ClassField|compat.ClassPrivateAccessor|compat.ClassPrivateBrandCheck|compat.ClassPrivateField|
  2761  			compat.ClassPrivateMethod|compat.ClassPrivateStaticAccessor|compat.ClassPrivateStaticField|
  2762  			compat.ClassPrivateStaticMethod|compat.ClassStaticBlocks|compat.ClassStaticField)
  2763  
  2764  	// If we're not building for the browser, automatically disable support for
  2765  	// inline </script> and </style> tags if there aren't currently any overrides
  2766  	if options.Platform != config.PlatformBrowser {
  2767  		if !options.UnsupportedJSFeatureOverridesMask.Has(compat.InlineScript) {
  2768  			options.UnsupportedJSFeatures |= compat.InlineScript
  2769  		}
  2770  		if !options.UnsupportedCSSFeatureOverridesMask.Has(compat.InlineStyle) {
  2771  			options.UnsupportedCSSFeatures |= compat.InlineStyle
  2772  		}
  2773  	}
  2774  }
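
// A small illustration (not part of esbuild) of what the defaults above give
// you for a zero-valued options struct:
func exampleApplyOptionDefaults() {
	var options config.Options
	applyOptionDefaults(&options)
	fmt.Println(options.OutputExtensionJS)                             // ".js"
	fmt.Println(options.ExtensionToLoader[".tsx"] == config.LoaderTSX) // true
	fmt.Println(len(options.ChunkPathTemplate))                        // 2, i.e. "[name]" then "-[hash]"
}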
  2775  
  2776  func fixInvalidUnsupportedJSFeatureOverrides(options *config.Options, implies compat.JSFeature, implied compat.JSFeature) {
  2777  	// If this feature is unsupported, that implies that the other features must also be unsupported
  2778  	if options.UnsupportedJSFeatureOverrides.Has(implies) {
  2779  		options.UnsupportedJSFeatures |= implied
  2780  		options.UnsupportedJSFeatureOverrides |= implied
  2781  		options.UnsupportedJSFeatureOverridesMask |= implied
  2782  	}
  2783  }
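
// For example (an illustrative sketch, not part of esbuild): explicitly
// marking async/await as unsupported must also mark for-await, async
// generators, and top-level await as unsupported, since those features all
// depend on async/await support:
func exampleFeatureImplication() {
	options := config.Options{UnsupportedJSFeatureOverrides: compat.AsyncAwait}
	fixInvalidUnsupportedJSFeatureOverrides(&options, compat.AsyncAwait,
		compat.AsyncGenerator|compat.ForAwait|compat.TopLevelAwait)
	fmt.Println(options.UnsupportedJSFeatures.Has(compat.TopLevelAwait)) // true
}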
  2784  
  2785  type Linker func(
  2786  	options *config.Options,
  2787  	timer *helpers.Timer,
  2788  	log logger.Log,
  2789  	fs fs.FS,
  2790  	res *resolver.Resolver,
  2791  	inputFiles []graph.InputFile,
  2792  	entryPoints []graph.EntryPoint,
  2793  	uniqueKeyPrefix string,
  2794  	reachableFiles []uint32,
  2795  	dataForSourceMaps func() []DataForSourceMap,
  2796  ) []graph.OutputFile
  2797  
  2798  func (b *Bundle) Compile(log logger.Log, timer *helpers.Timer, mangleCache map[string]interface{}, link Linker) ([]graph.OutputFile, string) {
  2799  	timer.Begin("Compile phase")
  2800  	defer timer.End("Compile phase")
  2801  
  2802  	if b.options.CancelFlag.DidCancel() {
  2803  		return nil, ""
  2804  	}
  2805  
  2806  	options := b.options
  2807  
  2808  	// In most cases we don't need synchronized access to the mangle cache
  2809  	cssUsedLocalNames := make(map[string]bool)
  2810  	options.ExclusiveMangleCacheUpdate = func(cb func(
  2811  		mangleCache map[string]interface{},
  2812  		cssUsedLocalNames map[string]bool,
  2813  	)) {
  2814  		cb(mangleCache, cssUsedLocalNames)
  2815  	}
  2816  
  2817  	files := make([]graph.InputFile, len(b.files))
  2818  	for i, file := range b.files {
  2819  		files[i] = file.inputFile
  2820  	}
  2821  
  2822  	// Find every file that is transitively reachable from any entry point
  2823  	allReachableFiles := findReachableFiles(files, b.entryPoints)
  2824  
  2825  	// Compute source map data in parallel with linking
  2826  	timer.Begin("Spawn source map tasks")
  2827  	dataForSourceMaps := b.computeDataForSourceMapsInParallel(&options, allReachableFiles)
  2828  	timer.End("Spawn source map tasks")
  2829  
  2830  	var resultGroups [][]graph.OutputFile
  2831  	if options.CodeSplitting || len(b.entryPoints) == 1 {
  2832  		// If code splitting is enabled or if there's only one entry point, link all entry points together
  2833  		resultGroups = [][]graph.OutputFile{link(&options, timer, log, b.fs, b.res,
  2834  			files, b.entryPoints, b.uniqueKeyPrefix, allReachableFiles, dataForSourceMaps)}
  2835  	} else {
  2836  		// Otherwise, link each entry point with the runtime file separately
  2837  		waitGroup := sync.WaitGroup{}
  2838  		resultGroups = make([][]graph.OutputFile, len(b.entryPoints))
  2839  		serializer := helpers.MakeSerializer(len(b.entryPoints))
  2840  		for i, entryPoint := range b.entryPoints {
  2841  			waitGroup.Add(1)
  2842  			go func(i int, entryPoint graph.EntryPoint) {
  2843  				entryPoints := []graph.EntryPoint{entryPoint}
  2844  				forked := timer.Fork()
  2845  
  2846  				// Each goroutine needs a separate options object
  2847  				optionsClone := options
  2848  				optionsClone.ExclusiveMangleCacheUpdate = func(cb func(
  2849  					mangleCache map[string]interface{},
  2850  					cssUsedLocalNames map[string]bool,
  2851  				)) {
  2852  					// Serialize all accesses to the mangle cache in entry point order for determinism
  2853  					serializer.Enter(i)
  2854  					defer serializer.Leave(i)
  2855  					cb(mangleCache, cssUsedLocalNames)
  2856  				}
  2857  
  2858  				resultGroups[i] = link(&optionsClone, forked, log, b.fs, b.res, files, entryPoints,
  2859  					b.uniqueKeyPrefix, findReachableFiles(files, entryPoints), dataForSourceMaps)
  2860  				timer.Join(forked)
  2861  				waitGroup.Done()
  2862  			}(i, entryPoint)
  2863  		}
  2864  		waitGroup.Wait()
  2865  	}
  2866  
  2867  	// Join the results in entry point order for determinism
  2868  	var outputFiles []graph.OutputFile
  2869  	for _, group := range resultGroups {
  2870  		outputFiles = append(outputFiles, group...)
  2871  	}
  2872  
  2873  	// Also generate the metadata file if necessary
  2874  	var metafileJSON string
  2875  	if options.NeedsMetafile {
  2876  		timer.Begin("Generate metadata JSON")
  2877  		metafileJSON = b.generateMetadataJSON(outputFiles, allReachableFiles, options.ASCIIOnly)
  2878  		timer.End("Generate metadata JSON")
  2879  	}
  2880  
  2881  	if !options.WriteToStdout {
  2882  		// Make sure an output file never overwrites an input file
  2883  		if !options.AllowOverwrite {
  2884  			sourceAbsPaths := make(map[string]uint32)
  2885  			for _, sourceIndex := range allReachableFiles {
  2886  				keyPath := b.files[sourceIndex].inputFile.Source.KeyPath
  2887  				if keyPath.Namespace == "file" {
  2888  					absPathKey := canonicalFileSystemPathForWindows(keyPath.Text)
  2889  					sourceAbsPaths[absPathKey] = sourceIndex
  2890  				}
  2891  			}
  2892  			for _, outputFile := range outputFiles {
  2893  				absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath)
  2894  				if sourceIndex, ok := sourceAbsPaths[absPathKey]; ok {
  2895  					hint := ""
  2896  					switch logger.API {
  2897  					case logger.CLIAPI:
  2898  						hint = " (use \"--allow-overwrite\" to allow this)"
  2899  					case logger.JSAPI:
  2900  						hint = " (use \"allowOverwrite: true\" to allow this)"
  2901  					case logger.GoAPI:
  2902  						hint = " (use \"AllowOverwrite: true\" to allow this)"
  2903  					}
  2904  					log.AddError(nil, logger.Range{},
  2905  						fmt.Sprintf("Refusing to overwrite input file %q%s",
  2906  							b.files[sourceIndex].inputFile.Source.PrettyPath, hint))
  2907  				}
  2908  			}
  2909  		}
  2910  
  2911  		// Make sure an output file never overwrites another output file. This
  2912  		// is almost certainly unintentional and would otherwise happen silently.
  2913  		//
  2914  		// Make an exception for files that have identical contents. In that case
  2915  		// the duplicate is just silently filtered out. This can happen with the
  2916  		// "file" loader, for example.
  2917  		outputFileMap := make(map[string][]byte)
  2918  		end := 0 // filtered in place: surviving files are compacted to the front
  2919  		for _, outputFile := range outputFiles {
  2920  			absPathKey := canonicalFileSystemPathForWindows(outputFile.AbsPath)
  2921  			contents, ok := outputFileMap[absPathKey]
  2922  
  2923  			// If this isn't a duplicate, keep the output file
  2924  			if !ok {
  2925  				outputFileMap[absPathKey] = outputFile.Contents
  2926  				outputFiles[end] = outputFile
  2927  				end++
  2928  				continue
  2929  			}
  2930  
  2931  			// If the paths and contents are both the same, only keep the first one
  2932  			if bytes.Equal(contents, outputFile.Contents) {
  2933  				continue
  2934  			}
  2935  
  2936  			// Otherwise, generate an error
  2937  			outputPath := outputFile.AbsPath
  2938  			if relPath, ok := b.fs.Rel(b.fs.Cwd(), outputPath); ok {
  2939  				outputPath = relPath
  2940  			}
  2941  			log.AddError(nil, logger.Range{}, "Two output files share the same path but have different contents: "+outputPath)
  2942  		}
  2943  		outputFiles = outputFiles[:end]
  2944  	}
  2945  
  2946  	return outputFiles, metafileJSON
  2947  }
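
// The serialized mangle-cache access above can be illustrated in isolation.
// A sketch (not part of esbuild) assuming "helpers.MakeSerializer" behaves as
// it is used here, with "Enter(i)" blocking until every index below "i" has
// called "Leave": concurrent goroutines then touch shared state in index
// order, which makes the outcome deterministic.
func exampleSerializedAppend(n int) []int {
	var waitGroup sync.WaitGroup
	serializer := helpers.MakeSerializer(n)
	var order []int
	for i := 0; i < n; i++ {
		waitGroup.Add(1)
		go func(i int) {
			defer waitGroup.Done()
			serializer.Enter(i)
			defer serializer.Leave(i)
			order = append(order, i) // always appends 0, 1, ..., n-1 in order
		}(i)
	}
	waitGroup.Wait()
	return order
}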
  2948  
  2949  // Find all files reachable from all entry points. The returned order is
  2950  // deterministic as long as the entry point order is, since it's the
  2951  // postorder of the graph traversal and the import record order within a
  2952  // given file is deterministic.
  2953  func findReachableFiles(files []graph.InputFile, entryPoints []graph.EntryPoint) []uint32 {
  2954  	visited := make(map[uint32]bool)
  2955  	var order []uint32
  2956  	var visit func(uint32)
  2957  
  2958  	// Include this file and all files it imports
  2959  	visit = func(sourceIndex uint32) {
  2960  		if !visited[sourceIndex] {
  2961  			visited[sourceIndex] = true
  2962  			file := &files[sourceIndex]
  2963  			if repr, ok := file.Repr.(*graph.JSRepr); ok && repr.CSSSourceIndex.IsValid() {
  2964  				visit(repr.CSSSourceIndex.GetIndex())
  2965  			}
  2966  			if recordsPtr := file.Repr.ImportRecords(); recordsPtr != nil {
  2967  				for _, record := range *recordsPtr {
  2968  					if record.SourceIndex.IsValid() {
  2969  						visit(record.SourceIndex.GetIndex())
  2970  					} else if record.CopySourceIndex.IsValid() {
  2971  						visit(record.CopySourceIndex.GetIndex())
  2972  					}
  2973  				}
  2974  			}
  2975  
  2976  			// Each file must come after its dependencies
  2977  			order = append(order, sourceIndex)
  2978  		}
  2979  	}
  2980  
  2981  	// The runtime is always included in case it's needed
  2982  	visit(runtime.SourceIndex)
  2983  
  2984  	// Include all files reachable from any entry point
  2985  	for _, entryPoint := range entryPoints {
  2986  		visit(entryPoint.SourceIndex)
  2987  	}
  2988  
  2989  	return order
  2990  }
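
// A self-contained sketch of the same traversal on a plain adjacency list
// (not part of esbuild). Because nodes are appended after their dependencies
// are visited, the result is a postorder, and it's stable for a fixed edge
// order:
func examplePostorder(adjacency [][]int, roots []int) []int {
	visited := make([]bool, len(adjacency))
	var order []int
	var visit func(int)
	visit = func(node int) {
		if !visited[node] {
			visited[node] = true
			for _, dependency := range adjacency[node] {
				visit(dependency)
			}
			order = append(order, node) // each node comes after its dependencies
		}
	}
	for _, root := range roots {
		visit(root)
	}
	return order
}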
  2991  
  2992  // This is done in parallel with linking because linking is a mostly serial
  2993  // phase, which leaves spare resources for parallel work. This could also be done
  2994  // during parsing but that would slow down parsing and delay the start of the
  2995  // linking phase, which then delays the whole bundling process.
  2996  //
  2997  // However, doing this during parsing would allow it to be cached along with
  2998  // the parsed ASTs, which would speed up incremental builds. In the future it
  2999  // may be worth computing this during parsing when incremental builds are
  3000  // active, while still computing it during linking for optimal speed during
  3001  // non-incremental builds.
  3002  func (b *Bundle) computeDataForSourceMapsInParallel(options *config.Options, reachableFiles []uint32) func() []DataForSourceMap {
  3003  	if options.SourceMap == config.SourceMapNone {
  3004  		return func() []DataForSourceMap {
  3005  			return nil
  3006  		}
  3007  	}
  3008  
  3009  	var waitGroup sync.WaitGroup
  3010  	results := make([]DataForSourceMap, len(b.files))
  3011  
  3012  	for _, sourceIndex := range reachableFiles {
  3013  		if f := &b.files[sourceIndex]; f.inputFile.Loader.CanHaveSourceMap() {
  3014  			var approximateLineCount int32
  3015  			switch repr := f.inputFile.Repr.(type) {
  3016  			case *graph.JSRepr:
  3017  				approximateLineCount = repr.AST.ApproximateLineCount
  3018  			case *graph.CSSRepr:
  3019  				approximateLineCount = repr.AST.ApproximateLineCount
  3020  			}
  3021  			waitGroup.Add(1)
  3022  			go func(sourceIndex uint32, f *scannerFile, approximateLineCount int32) {
  3023  				result := &results[sourceIndex]
  3024  				result.LineOffsetTables = sourcemap.GenerateLineOffsetTables(f.inputFile.Source.Contents, approximateLineCount)
  3025  				sm := f.inputFile.InputSourceMap
  3026  				if !options.ExcludeSourcesContent {
  3027  					if sm == nil {
  3028  						// Simple case: no nested source map
  3029  						result.QuotedContents = [][]byte{helpers.QuoteForJSON(f.inputFile.Source.Contents, options.ASCIIOnly)}
  3030  					} else {
  3031  						// Complex case: nested source map
  3032  						result.QuotedContents = make([][]byte, len(sm.Sources))
  3033  						nullContents := []byte("null")
  3034  						for i := range sm.Sources {
  3035  							// Missing contents become a "null" literal
  3036  							quotedContents := nullContents
  3037  							if i < len(sm.SourcesContent) {
  3038  								if value := sm.SourcesContent[i]; value.Quoted != "" && (!options.ASCIIOnly || isASCIIOnly(value.Quoted)) {
  3039  									// Just use the value directly from the input file
  3040  									quotedContents = []byte(value.Quoted)
  3041  								} else if value.Value != nil {
  3042  									// Re-quote non-ASCII values if output is ASCII-only.
  3043  									// Also quote values that haven't been quoted yet
  3044  									// (happens when the entire "sourcesContent" array is
  3045  									// absent and the source has been found on the file
  3046  									// system using the "sources" array).
  3047  									quotedContents = helpers.QuoteForJSON(helpers.UTF16ToString(value.Value), options.ASCIIOnly)
  3048  								}
  3049  							}
  3050  							result.QuotedContents[i] = quotedContents
  3051  						}
  3052  					}
  3053  				}
  3054  				waitGroup.Done()
  3055  			}(sourceIndex, f, approximateLineCount)
  3056  		}
  3057  	}
  3058  
  3059  	return func() []DataForSourceMap {
  3060  		waitGroup.Wait()
  3061  		return results
  3062  	}
  3063  }
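
// The concurrency shape above is a simple future: spawn the work eagerly,
// then return a function that waits for and yields the results. A stand-alone
// sketch (not part of esbuild) with trivial per-item work:
func exampleDeferredLengths(inputs []string) func() []int {
	var waitGroup sync.WaitGroup
	results := make([]int, len(inputs))
	for i, input := range inputs {
		waitGroup.Add(1)
		go func(i int, input string) {
			defer waitGroup.Done()
			results[i] = len(input) // stand-in for the real per-file work
		}(i, input)
	}
	return func() []int {
		waitGroup.Wait() // only block when the results are actually needed
		return results
	}
}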
  3064  
  3065  func (b *Bundle) generateMetadataJSON(results []graph.OutputFile, allReachableFiles []uint32, asciiOnly bool) string {
  3066  	sb := strings.Builder{}
  3067  	sb.WriteString("{\n  \"inputs\": {")
  3068  
  3069  	// Write inputs
  3070  	isFirst := true
  3071  	for _, sourceIndex := range allReachableFiles {
  3072  		if b.files[sourceIndex].inputFile.OmitFromSourceMapsAndMetafile {
  3073  			continue
  3074  		}
  3075  		if file := &b.files[sourceIndex]; len(file.jsonMetadataChunk) > 0 {
  3076  			if isFirst {
  3077  				isFirst = false
  3078  				sb.WriteString("\n    ")
  3079  			} else {
  3080  				sb.WriteString(",\n    ")
  3081  			}
  3082  			sb.WriteString(file.jsonMetadataChunk)
  3083  		}
  3084  	}
  3085  
  3086  	sb.WriteString("\n  },\n  \"outputs\": {")
  3087  
  3088  	// Write outputs
  3089  	isFirst = true
  3090  	paths := make(map[string]bool)
  3091  	for _, result := range results {
  3092  		if len(result.JSONMetadataChunk) > 0 {
  3093  			path := resolver.PrettyPath(b.fs, logger.Path{Text: result.AbsPath, Namespace: "file"})
  3094  			if paths[path] {
  3095  				// Don't write out the same path twice (can happen with the "file" loader)
  3096  				continue
  3097  			}
  3098  			if isFirst {
  3099  				isFirst = false
  3100  				sb.WriteString("\n    ")
  3101  			} else {
  3102  				sb.WriteString(",\n    ")
  3103  			}
  3104  			paths[path] = true
  3105  			sb.WriteString(fmt.Sprintf("%s: ", helpers.QuoteForJSON(path, asciiOnly)))
  3106  			sb.WriteString(result.JSONMetadataChunk)
  3107  		}
  3108  	}
  3109  
  3110  	sb.WriteString("\n  }\n}\n")
  3111  	return sb.String()
  3112  }
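
// Pieced together from the chunks above, the assembled metafile has roughly
// this shape (the paths and numbers are illustrative):
//
//	{
//	  "inputs": {
//	    "src/app.js": { ...partial JSON produced during the scan phase... }
//	  },
//	  "outputs": {
//	    "out/app.js": {
//	      "imports": [],
//	      "exports": [],
//	      "inputs": { "src/app.js": { "bytesInOutput": 42 } },
//	      "bytes": 1024
//	    }
//	  }
//	}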
  3113  
  3114  type runtimeCacheKey struct {
  3115  	unsupportedJSFeatures compat.JSFeature
  3116  	minifySyntax          bool
  3117  	minifyIdentifiers     bool
  3118  }
  3119  
  3120  type runtimeCache struct {
  3121  	astMap   map[runtimeCacheKey]js_ast.AST
  3122  	astMutex sync.Mutex
  3123  }
  3124  
  3125  var globalRuntimeCache runtimeCache
  3126  
  3127  func (cache *runtimeCache) parseRuntime(options *config.Options) (source logger.Source, runtimeAST js_ast.AST, ok bool) {
  3128  	key := runtimeCacheKey{
  3129  		// All configuration options that the runtime code depends on must go here
  3130  		unsupportedJSFeatures: options.UnsupportedJSFeatures,
  3131  		minifySyntax:          options.MinifySyntax,
  3132  		minifyIdentifiers:     options.MinifyIdentifiers,
  3133  	}
  3134  
  3135  	// Determine which source to use
  3136  	source = runtime.Source(key.unsupportedJSFeatures)
  3137  
  3138  	// Cache hit?
  3139  	(func() {
  3140  		cache.astMutex.Lock()
  3141  		defer cache.astMutex.Unlock()
  3142  		if cache.astMap != nil {
  3143  			runtimeAST, ok = cache.astMap[key]
  3144  		}
  3145  	})()
  3146  	if ok {
  3147  		return
  3148  	}
  3149  
  3150  	// Cache miss
  3151  	log := logger.NewDeferLog(logger.DeferLogAll, nil)
  3152  	runtimeAST, ok = js_parser.Parse(log, source, js_parser.OptionsFromConfig(&config.Options{
  3153  		// These configuration options must only depend on the key
  3154  		UnsupportedJSFeatures: key.unsupportedJSFeatures,
  3155  		MinifySyntax:          key.minifySyntax,
  3156  		MinifyIdentifiers:     key.minifyIdentifiers,
  3157  
  3158  		// Always do tree shaking for the runtime because we never want to
  3159  		// include unnecessary runtime code
  3160  		TreeShaking: true,
  3161  	}))
  3162  	if log.HasErrors() {
  3163  		msgs := "Internal error: failed to parse runtime:\n"
  3164  		for _, msg := range log.Done() {
  3165  			msgs += msg.String(logger.OutputOptions{IncludeSource: true}, logger.TerminalInfo{})
  3166  		}
  3167  		panic(msgs[:len(msgs)-1])
  3168  	}
  3169  
  3170  	// Cache for next time
  3171  	if ok {
  3172  		cache.astMutex.Lock()
  3173  		defer cache.astMutex.Unlock()
  3174  		if cache.astMap == nil {
  3175  			cache.astMap = make(map[runtimeCacheKey]js_ast.AST)
  3176  		}
  3177  		cache.astMap[key] = runtimeAST
  3178  	}
  3179  	return
  3180  }
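
// The locking scheme above is a check-then-fill cache: lock to probe, unlock
// for the expensive parse, then lock again to store. Concurrent misses may
// parse redundantly, but the result is identical for a given key, so the race
// is benign. A generic sketch of the same pattern (not part of esbuild):
type exampleCache struct {
	mutex sync.Mutex
	items map[string]string
}

func (c *exampleCache) get(key string, compute func() string) string {
	c.mutex.Lock()
	value, ok := c.items[key]
	c.mutex.Unlock()
	if ok {
		return value
	}
	value = compute() // may run more than once, but deterministically
	c.mutex.Lock()
	defer c.mutex.Unlock()
	if c.items == nil {
		c.items = make(map[string]string)
	}
	c.items[key] = value
	return value
}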
  3181  
  3182  // Returns the path of this file relative to "outbase", which is then ready to
  3183  // be joined with the absolute output directory path. The directory and name
  3184  // components are returned separately for convenience.
  3185  func PathRelativeToOutbase(
  3186  	inputFile *graph.InputFile,
  3187  	options *config.Options,
  3188  	fs fs.FS,
  3189  	avoidIndex bool,
  3190  	customFilePath string,
  3191  ) (relDir string, baseName string) {
  3192  	relDir = "/"
  3193  	absPath := inputFile.Source.KeyPath.Text
  3194  
  3195  	if customFilePath != "" {
  3196  		// Use the configured output path if present
  3197  		absPath = customFilePath
  3198  		if !fs.IsAbs(absPath) {
  3199  			absPath = fs.Join(options.AbsOutputBase, absPath)
  3200  		}
  3201  	} else if inputFile.Source.KeyPath.Namespace != "file" {
  3202  		// Come up with a path for virtual paths (i.e. non-file-system paths)
  3203  		dir, base, _ := logger.PlatformIndependentPathDirBaseExt(absPath)
  3204  		if avoidIndex && base == "index" {
  3205  			_, base, _ = logger.PlatformIndependentPathDirBaseExt(dir)
  3206  		}
  3207  		baseName = sanitizeFilePathForVirtualModulePath(base)
  3208  		return
  3209  	} else {
  3210  		// Heuristic: If the file is named something like "index.js", then use
  3211  		// the name of the parent directory instead. This helps avoid the
  3212  		// situation where many chunks are named "index" because of people
  3213  		// dynamically importing npm packages that make use of node's implicit
  3214  		// "index" file name feature.
  3215  		if avoidIndex {
  3216  			base := fs.Base(absPath)
  3217  			base = base[:len(base)-len(fs.Ext(base))]
  3218  			if base == "index" {
  3219  				absPath = fs.Dir(absPath)
  3220  			}
  3221  		}
  3222  	}
  3223  
  3224  	// Try to get a relative path to the base directory
  3225  	relPath, ok := fs.Rel(options.AbsOutputBase, absPath)
  3226  	if !ok {
  3227  		// This can fail in some situations such as on different drives on
  3228  		// Windows. In that case we just use the file name.
  3229  		baseName = fs.Base(absPath)
  3230  	} else {
  3231  		// Now we finally have a relative path
  3232  		relDir = fs.Dir(relPath) + "/"
  3233  		baseName = fs.Base(relPath)
  3234  
  3235  		// Use platform-independent slashes
  3236  		relDir = strings.ReplaceAll(relDir, "\\", "/")
  3237  
  3238  		// Replace leading "../" so we don't try to write outside of the output
  3239  		// directory. This normally can't happen because "AbsOutputBase" is
  3240  		// automatically computed to contain all entry point files, but it can
  3241  		// happen if someone sets it manually via the "outbase" API option.
  3242  		//
  3243  		// Note that we can't just strip any leading "../" because that could
  3244  		// cause two separate entry point paths to collide. For example, there
  3245  		// could be both "src/index.js" and "../src/index.js" as entry points.
  3246  		dotDotCount := 0
  3247  		for strings.HasPrefix(relDir[dotDotCount*3:], "../") {
  3248  			dotDotCount++
  3249  		}
  3250  		if dotDotCount > 0 {
  3251  			// The use of "_.._" here is somewhat arbitrary but it is unlikely to
  3252  			// collide with a folder named by a human and it works on Windows
  3253  			// (Windows doesn't like names that end with a "."). And not starting
  3254  			// with a "." means that it will not be hidden on Unix.
  3255  			relDir = strings.Repeat("_.._/", dotDotCount) + relDir[dotDotCount*3:]
  3256  		}
  3257  		for strings.HasSuffix(relDir, "/") {
  3258  			relDir = relDir[:len(relDir)-1]
  3259  		}
  3260  		relDir = "/" + relDir
  3261  		if strings.HasSuffix(relDir, "/.") {
  3262  			relDir = relDir[:len(relDir)-1]
  3263  		}
  3264  	}
  3265  
  3266  	// Strip the file extension if the output path was derived from an input file
  3267  	if customFilePath == "" {
  3268  		ext := fs.Ext(baseName)
  3269  		baseName = baseName[:len(baseName)-len(ext)]
  3270  	}
  3271  	return
  3272  }
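
// The leading "../" rewriting above can be exercised on its own. A minimal
// sketch (not part of esbuild) mirroring the loop in PathRelativeToOutbase:
func exampleEscapeLeadingDotDot(relDir string) string {
	dotDotCount := 0
	for strings.HasPrefix(relDir[dotDotCount*3:], "../") {
		dotDotCount++
	}
	// e.g. "../../src/" becomes "_.._/_.._/src/"
	return strings.Repeat("_.._/", dotDotCount) + relDir[dotDotCount*3:]
}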
  3273  
  3274  func sanitizeFilePathForVirtualModulePath(path string) string {
  3275  	// Convert it to a safe file path. See: https://stackoverflow.com/a/31976060
  3276  	sb := strings.Builder{}
  3277  	needsGap := false
  3278  	for _, c := range path {
  3279  		switch c {
  3280  		case 0:
  3281  			// These characters are forbidden on Unix and Windows
  3282  
  3283  		case '<', '>', ':', '"', '|', '?', '*':
  3284  			// These characters are forbidden on Windows
  3285  
  3286  		default:
  3287  			if c < 0x20 {
  3288  				// These characters are forbidden on Windows
  3289  				break
  3290  			}
  3291  
  3292  			// Turn runs of invalid characters into a '_'
  3293  			if needsGap {
  3294  				sb.WriteByte('_')
  3295  				needsGap = false
  3296  			}
  3297  
  3298  			sb.WriteRune(c)
  3299  			continue
  3300  		}
  3301  
  3302  		if sb.Len() > 0 {
  3303  			needsGap = true
  3304  		}
  3305  	}
  3306  
  3307  	// Make sure the name isn't empty
  3308  	if sb.Len() == 0 {
  3309  		return "_"
  3310  	}
  3311  
  3312  	// Note: An extension will be added to this base name, so there is no need to
  3313  	// avoid forbidden file names such as ".." since ".js" is a valid file name.
  3314  	return sb.String()
  3315  }
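
// For example (illustrative inputs): runs of forbidden characters collapse
// into a single "_", and leading or trailing runs disappear entirely:
//
//	sanitizeFilePathForVirtualModulePath("a:b*c")  == "a_b_c"
//	sanitizeFilePathForVirtualModulePath("?name?") == "name"
//	sanitizeFilePathForVirtualModulePath("<>:")    == "_"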