github.com/neohugo/neohugo@v0.123.8/hugolib/hugo_sites_build.go (about) 1 // Copyright 2024 The Hugo Authors. All rights reserved. 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // http://www.apache.org/licenses/LICENSE-2.0 7 // 8 // Unless required by applicable law or agreed to in writing, software 9 // distributed under the License is distributed on an "AS IS" BASIS, 10 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 // See the License for the specific language governing permissions and 12 // limitations under the License. 13 14 package hugolib 15 16 import ( 17 "bytes" 18 "context" 19 "encoding/json" 20 "errors" 21 "fmt" 22 "os" 23 "path" 24 "path/filepath" 25 "strings" 26 "time" 27 28 "github.com/bep/logg" 29 "github.com/neohugo/neohugo/cache/dynacache" 30 "github.com/neohugo/neohugo/deps" 31 "github.com/neohugo/neohugo/hugofs/files" 32 "github.com/neohugo/neohugo/hugofs/glob" 33 "github.com/neohugo/neohugo/identity" 34 "github.com/neohugo/neohugo/output" 35 "github.com/neohugo/neohugo/publisher" 36 "github.com/neohugo/neohugo/source" 37 "github.com/neohugo/neohugo/tpl" 38 39 "github.com/neohugo/neohugo/hugofs" 40 41 "github.com/neohugo/neohugo/common/herrors" 42 "github.com/neohugo/neohugo/common/loggers" 43 "github.com/neohugo/neohugo/common/para" 44 "github.com/neohugo/neohugo/common/paths" 45 "github.com/neohugo/neohugo/config" 46 "github.com/neohugo/neohugo/resources/page" 47 "github.com/neohugo/neohugo/resources/page/siteidentities" 48 "github.com/neohugo/neohugo/resources/postpub" 49 50 "github.com/spf13/afero" 51 52 "github.com/fsnotify/fsnotify" 53 ) 54 55 // Build builds all sites. If filesystem events are provided, 56 // this is considered to be a potential partial rebuild. 
func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
	infol := h.Log.InfoCommand("build")
	defer loggers.TimeTrackf(infol, time.Now(), nil, "")
	// Count completed builds, whether they succeed or fail.
	defer func() {
		h.buildCounter.Add(1)
	}()

	if h.Deps == nil {
		panic("must have deps")
	}

	// Serialize builds against the same destination unless explicitly disabled.
	if !config.NoBuildLock {
		unlock, err := h.BaseFs.LockBuild()
		if err != nil {
			return fmt.Errorf("failed to acquire a build lock: %w", err)
		}
		defer unlock()
	}

	// Notify build-end listeners regardless of outcome.
	defer func() {
		for _, s := range h.Sites {
			s.Deps.BuildEndListeners.Notify()
		}
	}()

	// Collect errors sent via h.SendError during the build; the goroutine below
	// drains them (capped at 50) and reduces them to a single error on errs.
	errCollector := h.StartErrorCollector()
	errs := make(chan error)

	go func(from, to chan error) {
		var errors []error
		i := 0
		for e := range from {
			i++
			// Cap the number of collected errors to avoid unbounded growth.
			if i > 50 {
				break
			}
			errors = append(errors, e)
		}
		to <- h.pickOneAndLogTheRest(errors)

		close(to)
	}(errCollector, errs)

	if h.Metrics != nil {
		h.Metrics.Reset()
	}

	// testCounters is only set from tests; fall back to a fresh set of counters.
	h.buildCounters = config.testCounters
	if h.buildCounters == nil {
		h.buildCounters = &buildCounters{}
	}

	// Need a pointer as this may be modified.
	conf := &config
	if conf.whatChanged == nil {
		// Assume everything has changed
		conf.whatChanged = &whatChanged{contentChanged: true}
	}

	var prepareErr error

	// PartialReRender skips the process/assemble phases and goes straight to render.
	if !config.PartialReRender {
		prepare := func() error {
			init := func(conf *BuildCfg) error {
				for _, s := range h.Sites {
					s.Deps.BuildStartListeners.Notify()
				}

				if len(events) > 0 {
					// Rebuild triggered by filesystem events.
					if err := h.initRebuild(conf); err != nil {
						return fmt.Errorf("initRebuild: %w", err)
					}
				} else {
					if err := h.initSites(conf); err != nil {
						return fmt.Errorf("initSites: %w", err)
					}
				}

				return nil
			}

			ctx := context.Background()

			if err := h.process(ctx, infol, conf, init, events...); err != nil {
				return fmt.Errorf("process: %w", err)
			}

			if err := h.assemble(ctx, infol, conf); err != nil {
				return fmt.Errorf("assemble: %w", err)
			}

			return nil
		}

		// A prepare failure is reported via the error collector, not returned
		// directly, so the collector goroutine can fold it in with other errors.
		if prepareErr = prepare(); prepareErr != nil {
			h.SendError(prepareErr)
		}
	}

	// Only render if preparation succeeded (or was skipped for PartialReRender).
	if prepareErr == nil {
		if err := h.render(infol, conf); err != nil {
			h.SendError(fmt.Errorf("render: %w", err))
		}

		if err := h.postRenderOnce(); err != nil {
			h.SendError(fmt.Errorf("postRenderOnce: %w", err))
		}

		if err := h.postProcess(infol); err != nil {
			h.SendError(fmt.Errorf("postProcess: %w", err))
		}
	}

	if h.Metrics != nil {
		var b bytes.Buffer
		h.Metrics.WriteMetrics(&b)

		h.Log.Printf("\nTemplate Metrics:\n\n")
		h.Log.Println(b.String())
	}

	// Closing the collector lets the drain goroutine finish and deliver
	// the aggregated error (or nil) on errs.
	h.StopErrorCollector()

	err := <-errs
	if err != nil {
		return err
	}

	if err := h.fatalErrorHandler.getErr(); err != nil {
		return err
	}

	// Treat any logged errors (from either logger) as a build failure.
	errorCount := h.Log.LoggCount(logg.LevelError) + loggers.Log().LoggCount(logg.LevelError)
	if errorCount > 0 {
		return fmt.Errorf("logged %d error(s)", errorCount)
	}

	return nil
}

// Build lifecycle methods
below. 199 // The order listed matches the order of execution. 200 201 func (h *HugoSites) initSites(config *BuildCfg) error { 202 h.reset(config) 203 return nil 204 } 205 206 func (h *HugoSites) initRebuild(config *BuildCfg) error { 207 if !h.Configs.Base.Internal.Watch { 208 return errors.New("rebuild called when not in watch mode") 209 } 210 211 h.pageTrees.treePagesResources.WalkPrefixRaw("", func(key string, n contentNodeI) bool { 212 n.resetBuildState() 213 return false 214 }) 215 216 for _, s := range h.Sites { 217 s.resetBuildState(config.whatChanged.contentChanged) 218 } 219 220 h.reset(config) 221 h.resetLogs() 222 223 return nil 224 } 225 226 // process prepares the Sites' sources for a full or partial rebuild. 227 // This will also parse the source and create all the Page objects. 228 func (h *HugoSites) process(ctx context.Context, l logg.LevelLogger, config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error { 229 l = l.WithField("step", "process") 230 defer loggers.TimeTrackf(l, time.Now(), nil, "") 231 232 if len(events) > 0 { 233 // This is a rebuild 234 return h.processPartial(ctx, l, config, init, events) 235 } 236 return h.processFull(ctx, l, *config) 237 } 238 239 // assemble creates missing sections, applies aggregate values (e.g. dates, cascading params), 240 // removes disabled pages etc. 241 func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *BuildCfg) error { 242 l = l.WithField("step", "assemble") 243 defer loggers.TimeTrackf(l, time.Now(), nil, "") 244 245 if !bcfg.whatChanged.contentChanged { 246 return nil 247 } 248 249 h.translationKeyPages.Reset() 250 assemblers := make([]*sitePagesAssembler, len(h.Sites)) 251 // Changes detected during assembly (e.g. 
aggregate date changes) 252 assembleChanges := &whatChanged{ 253 identitySet: make(map[identity.Identity]bool), 254 } 255 for i, s := range h.Sites { 256 assemblers[i] = &sitePagesAssembler{ 257 Site: s, 258 watching: s.watching(), 259 incomingChanges: bcfg.whatChanged, 260 assembleChanges: assembleChanges, 261 ctx: ctx, 262 } 263 } 264 265 g, _ := h.workersSite.Start(ctx) 266 for _, s := range assemblers { 267 s := s 268 g.Run(func() error { 269 return s.assemblePagesStep1(ctx) 270 }) 271 } 272 if err := g.Wait(); err != nil { 273 return err 274 } 275 276 changes := assembleChanges.Changes() 277 278 // Changes from the assemble step (e.g. lastMod, cascase) needs a re-calculation 279 // of what needs to be re-built. 280 if len(changes) > 0 { 281 if err := h.resolveAndClearStateForIdentities(ctx, l, nil, changes); err != nil { 282 return err 283 } 284 } 285 h.renderFormats = output.Formats{} 286 for _, s := range h.Sites { 287 s.s.initRenderFormats() 288 h.renderFormats = append(h.renderFormats, s.renderFormats...) 289 } 290 291 for _, s := range assemblers { 292 if err := s.assemblePagesStep2(); err != nil { 293 return err 294 } 295 } 296 297 h.renderFormats = output.Formats{} 298 for _, s := range h.Sites { 299 h.renderFormats = append(h.renderFormats, s.renderFormats...) 300 } 301 302 return nil 303 } 304 305 // render renders the sites. 
306 func (h *HugoSites) render(l logg.LevelLogger, config *BuildCfg) error { 307 l = l.WithField("step", "render") 308 start := time.Now() 309 defer func() { 310 loggers.TimeTrackf(l, start, h.buildCounters.loggFields(), "") 311 }() 312 313 if _, err := h.init.layouts.Do(context.Background()); err != nil { 314 return err 315 } 316 317 siteRenderContext := &siteRenderContext{cfg: config, multihost: h.Configs.IsMultihost} 318 319 i := 0 320 for _, s := range h.Sites { 321 siteRenderContext.languageIdx = s.languagei 322 h.currentSite = s 323 for siteOutIdx, renderFormat := range s.renderFormats { 324 siteRenderContext.outIdx = siteOutIdx 325 siteRenderContext.sitesOutIdx = i 326 i++ 327 328 select { 329 case <-h.Done(): 330 return nil 331 default: 332 for _, s2 := range h.Sites { 333 // We render site by site, but since the content is lazily rendered 334 // and a site can "borrow" content from other sites, every site 335 // needs this set. 336 s2.rc = &siteRenderingContext{Format: renderFormat} 337 338 if err := s2.preparePagesForRender(s == s2, siteRenderContext.sitesOutIdx); err != nil { 339 return err 340 } 341 } 342 if !config.SkipRender { 343 ll := l.WithField("substep", "pages"). 344 WithField("site", s.language.Lang). 345 WithField("outputFormat", renderFormat.Name) 346 347 start := time.Now() 348 349 if config.PartialReRender { 350 if err := s.renderPages(siteRenderContext); err != nil { 351 return err 352 } 353 } else { 354 if err := s.render(siteRenderContext); err != nil { 355 return err 356 } 357 } 358 loggers.TimeTrackf(ll, start, nil, "") 359 } 360 } 361 } 362 } 363 364 return nil 365 } 366 367 // / postRenderOnce runs some post processing that only needs to be done once, e.g. printing of unused templates. 
func (h *HugoSites) postRenderOnce() error {
	// Guarded by a sync.Once-style init so repeated builds (watch mode)
	// only do this work the first time.
	h.postRenderInit.Do(func() {
		conf := h.Configs.Base
		if conf.PrintPathWarnings {
			// We need to do this before any post processing, as that may write to the same files twice
			// and create false positives.
			hugofs.WalkFilesystems(h.Fs.PublishDir, func(fs afero.Fs) bool {
				if dfs, ok := fs.(hugofs.DuplicatesReporter); ok {
					dupes := dfs.ReportDuplicates()
					if dupes != "" {
						h.Log.Warnln("Duplicate target paths:", dupes)
					}
				}
				return false
			})
		}

		if conf.PrintUnusedTemplates {
			unusedTemplates := h.Tmpl().(tpl.UnusedTemplatesProvider).UnusedTemplates()
			for _, unusedTemplate := range unusedTemplates {
				h.Log.Warnf("Template %s is unused, source file %s", unusedTemplate.Name(), unusedTemplate.Filename())
			}
		}
	})
	return nil
}

// postProcess runs the post processors, e.g. writing the hugo_stats.json file.
func (h *HugoSites) postProcess(l logg.LevelLogger) error {
	l = l.WithField("step", "postProcess")
	defer loggers.TimeTrackf(l, time.Now(), nil, "")

	// Make sure to write any build stats to disk first so it's available
	// to the post processors.
	if err := h.writeBuildStats(); err != nil {
		return err
	}

	// This will only be set when js.Build have been triggered with
	// imports that resolves to the project or a module.
	// Write a jsconfig.json file to the project's /asset directory
	// to help JS IntelliSense in VS Code etc.
	if !h.ResourceSpec.BuildConfig().NoJSConfigInAssets {
		handleJSConfig := func(fi os.FileInfo) {
			m := fi.(hugofs.FileMetaInfo).Meta()
			// Only write the file for the project itself, not modules.
			if !m.IsProject {
				return
			}

			if jsConfig := h.ResourceSpec.JSConfigBuilder.Build(m.SourceRoot); jsConfig != nil {
				b, err := json.MarshalIndent(jsConfig, "", " ")
				if err != nil {
					h.Log.Warnf("Failed to create jsconfig.json: %s", err)
				} else {
					filename := filepath.Join(m.SourceRoot, "jsconfig.json")
					// In watch mode, mark the file so our own write
					// doesn't trigger a rebuild.
					if h.Configs.Base.Internal.Running {
						h.skipRebuildForFilenamesMu.Lock()
						h.skipRebuildForFilenames[filename] = true
						h.skipRebuildForFilenamesMu.Unlock()
					}
					// Make sure it's written to the OS fs as this is used by
					// editors.
					if err := afero.WriteFile(hugofs.Os, filename, b, 0o666); err != nil {
						h.Log.Warnf("Failed to write jsconfig.json: %s", err)
					}
				}
			}
		}

		fi, err := h.BaseFs.Assets.Fs.Stat("")
		if err != nil {
			if !herrors.IsNotExist(err) {
				h.Log.Warnf("Failed to resolve jsconfig.json dir: %s", err)
			}
		} else {
			handleJSConfig(fi)
		}
	}

	var toPostProcess []postpub.PostPublishedResource
	for _, r := range h.ResourceSpec.PostProcessResources {
		toPostProcess = append(toPostProcess, r)
	}

	if len(toPostProcess) == 0 {
		// Nothing more to do.
		return nil
	}

	// Rewrite published files containing post-process placeholders, in parallel.
	workers := para.New(config.GetNumWorkerMultiplier())
	g, _ := workers.Start(context.Background())

	// handleFile scans one published file for postpub placeholder tokens
	// (prefix...suffix) and replaces each with the resolved field value.
	handleFile := func(filename string) error {
		content, err := afero.ReadFile(h.BaseFs.PublishFs, filename)
		if err != nil {
			return err
		}

		// k is the scan offset into content.
		k := 0
		changed := false

		for {
			l := bytes.Index(content[k:], []byte(postpub.PostProcessPrefix))
			if l == -1 {
				break
			}
			// NOTE(review): if the suffix were missing, bytes.Index returns -1
			// and m would be wrong; presumably a prefix is always followed by
			// a suffix in published output — confirm.
			m := bytes.Index(content[k+l:], []byte(postpub.PostProcessSuffix)) + len(postpub.PostProcessSuffix)

			low, high := k+l, k+l+m

			// The full placeholder token, including prefix and suffix.
			field := content[low:high]

			forward := l + m

			for i, r := range toPostProcess {
				if r == nil {
					panic(fmt.Sprintf("resource %d to post process is nil", i+1))
				}
				v, ok := r.GetFieldString(string(field))
				if ok {
					// Splice in the replacement and continue scanning after it.
					content = append(content[:low], append([]byte(v), content[high:]...)...)
					changed = true
					forward = len(v)
					break
				}
			}

			k += forward
		}

		if changed {
			return afero.WriteFile(h.BaseFs.PublishFs, filename, content, 0o666)
		}

		return nil
	}

	// Only files known to contain the placeholder prefix need scanning.
	filenames := h.Deps.BuildState.GetFilenamesWithPostPrefix()
	for _, filename := range filenames {
		filename := filename // capture loop variable (pre-Go 1.22 semantics)
		g.Run(func() error {
			return handleFile(filename)
		})
	}

	// Prepare for a new build.
	for _, s := range h.Sites {
		s.ResourceSpec.PostProcessResources = make(map[string]postpub.PostPublishedResource)
	}

	return g.Wait()
}

// writeBuildStats writes hugo_stats.json (HTML element usage collected by the
// publisher) to the working dir, if build stats are enabled. The write is
// skipped when the content is unchanged.
func (h *HugoSites) writeBuildStats() error {
	if h.ResourceSpec == nil {
		panic("h.ResourceSpec is nil")
	}
	if !h.ResourceSpec.BuildConfig().BuildStats.Enabled() {
		return nil
	}

	// Merge the per-site element stats into one set.
	htmlElements := &publisher.HTMLElements{}
	for _, s := range h.Sites {
		stats := s.publisher.PublishStats()
		htmlElements.Merge(stats.HTMLElements)
	}

	htmlElements.Sort()

	stats := publisher.PublishStats{
		HTMLElements: *htmlElements,
	}

	var buf bytes.Buffer
	enc := json.NewEncoder(&buf)
	// Keep class names etc. readable in the JSON output.
	enc.SetEscapeHTML(false)
	enc.SetIndent("", " ")
	err := enc.Encode(stats)
	if err != nil {
		return err
	}
	js := buf.Bytes()

	filename := filepath.Join(h.Configs.LoadingInfo.BaseConfig.WorkingDir, files.FilenameHugoStatsJSON)

	if existingContent, err := afero.ReadFile(hugofs.Os, filename); err == nil {
		// Check if the content has changed.
		if bytes.Equal(existingContent, js) {
			return nil
		}
	}

	// Make sure it's always written to the OS fs.
	if err := afero.WriteFile(hugofs.Os, filename, js, 0o666); err != nil {
		return err
	}

	// Write to the destination as well if it's a in-memory fs.
	if !hugofs.IsOsFs(h.Fs.Source) {
		if err := afero.WriteFile(h.Fs.WorkingDirWritable, filename, js, 0o666); err != nil {
			return err
		}
	}

	return nil
}

// pathChange describes one changed source path in a partial rebuild.
type pathChange struct {
	// The path to the changed file.
	p *paths.Path

	// If true, this is a delete operation (a delete or a rename).
	delete bool

	// If true, this is a directory.
	isDir bool
}

// processPartial prepares the Sites' sources for a partial rebuild.
func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
	h.Log.Trace(logg.StringFunc(func() string {
		var sb strings.Builder
		sb.WriteString("File events:\n")
		for _, ev := range events {
			sb.WriteString(ev.String())
			sb.WriteString("\n")
		}
		return sb.String()
	}))

	// Filter out irrelevant events and normalize the rest.
	events = h.fileEventsFilter(events)
	events = h.fileEventsTranslate(events)

	logger := h.Log

	var (
		tmplAdded      bool
		tmplChanged    bool
		i18nChanged    bool
		contentChanged bool
	)

	changedPaths := struct {
		changedFiles []*paths.Path
		changedDirs  []*paths.Path
		deleted      []*paths.Path
	}{}

	// removeDuplicatePaths de-duplicates by p.Path(), preserving order.
	removeDuplicatePaths := func(ps []*paths.Path) []*paths.Path {
		seen := make(map[string]bool)
		var filtered []*paths.Path
		for _, p := range ps {
			if !seen[p.Path()] {
				seen[p.Path()] = true
				filtered = append(filtered, p)
			}
		}
		return filtered
	}

	var (
		cacheBusters      []func(string) bool
		deletedDirs       []string
		addedContentPaths []*paths.Path
	)

	// Classify each filesystem event as removed/added/changed-dir and resolve
	// it to component-relative paths.
	for _, ev := range events {
		removed := false
		added := false

		if ev.Op&fsnotify.Remove == fsnotify.Remove {
			removed = true
		}

		fi, statErr := h.Fs.Source.Stat(ev.Name)

		// Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
		// Sometimes a rename operation means that file has been renamed other times it means
		// it's been updated.
		if ev.Op.Has(fsnotify.Rename) {
			// If the file is still on disk, it's only been updated, if it's not, it's been moved
			if statErr != nil {
				removed = true
			}
		}
		if ev.Op.Has(fsnotify.Create) {
			added = true
		}

		isChangedDir := statErr == nil && fi.IsDir()

		// One OS path may map to several component paths (e.g. overlays).
		cpss := h.BaseFs.ResolvePaths(ev.Name)
		pss := make([]*paths.Path, len(cpss))
		for i, cps := range cpss {
			p := cps.Path
			if removed && !paths.HasExt(p) {
				// Assume this is a renamed/removed directory.
				// For deletes, we walk up the tree to find the container (e.g. branch bundle),
				// so we will catch this even if it is a file without extension.
				// This avoids us walking up to the home page bundle for the common case
				// of renaming root sections.
				p = p + "/_index.md"
				deletedDirs = append(deletedDirs, cps.Path)
			}

			pss[i] = h.Configs.ContentPathParser.Parse(cps.Component, p)
			if added && !isChangedDir && cps.Component == files.ComponentFolderContent {
				addedContentPaths = append(addedContentPaths, pss[i])
			}

			// Compile cache buster.
			np := glob.NormalizePath(path.Join(cps.Component, cps.Path))
			g, err := h.ResourceSpec.BuildConfig().MatchCacheBuster(h.Log, np)
			if err == nil && g != nil {
				cacheBusters = append(cacheBusters, g)
			}
		}

		if removed {
			changedPaths.deleted = append(changedPaths.deleted, pss...)
		} else if isChangedDir {
			changedPaths.changedDirs = append(changedPaths.changedDirs, pss...)
		} else {
			changedPaths.changedFiles = append(changedPaths.changedFiles, pss...)
		}
	}

	var (
		addedOrChangedContent []pathChange
		changes               []identity.Identity
	)

	// Find the most specific identity possible.
	// handleChange dispatches on the component folder (content, layouts,
	// assets, data, i18n) and appends the affected identities to changes.
	handleChange := func(pathInfo *paths.Path, delete, isDir bool) {
		switch pathInfo.Component() {
		case files.ComponentFolderContent:
			logger.Println("Source changed", pathInfo.Path())
			if ids := h.pageTrees.collectAndMarkStaleIdentities(pathInfo); len(ids) > 0 {
				changes = append(changes, ids...)
			}

			contentChanged = true

			if config.RecentlyVisited != nil {
				// Fast render mode. Adding them to the visited queue
				// avoids rerendering them on navigation.
				for _, id := range changes {
					if p, ok := id.(page.Page); ok {
						config.RecentlyVisited.Add(p.RelPermalink())
					}
				}
			}

			// Taxonomy entries are cheap to rebuild; reset them wholesale.
			h.pageTrees.treeTaxonomyEntries.DeletePrefix("")

			if delete {
				_, ok := h.pageTrees.treePages.LongestPrefixAll(pathInfo.Base())
				if ok {
					h.pageTrees.treePages.DeleteAll(pathInfo.Base())
					h.pageTrees.resourceTrees.DeleteAll(pathInfo.Base())
					if pathInfo.IsBundle() {
						// Assume directory removed.
						h.pageTrees.treePages.DeletePrefixAll(pathInfo.Base() + "/")
						h.pageTrees.resourceTrees.DeletePrefixAll(pathInfo.Base() + "/")
					}
				} else {
					h.pageTrees.resourceTrees.DeleteAll(pathInfo.Base())
				}
			}

			addedOrChangedContent = append(addedOrChangedContent, pathChange{p: pathInfo, delete: delete, isDir: isDir})

		case files.ComponentFolderLayouts:
			tmplChanged = true
			templatePath := pathInfo.TrimLeadingSlash().PathNoLang()
			if !h.Tmpl().HasTemplate(templatePath) {
				tmplAdded = true
			}

			if tmplAdded {
				logger.Println("Template added", pathInfo.Path())
				// A new template may require a more coarse grained build.
				base := pathInfo.Base()
				if strings.Contains(base, "_markup") {
					// It's hard to determine the exact change set of this,
					// so be very coarse grained.
					changes = append(changes, identity.GenghisKhan)
				}
				// NOTE(review): the shortcodes check is a separate if, so a
				// _markup template also falls through here — confirm intended.
				if strings.Contains(base, "shortcodes") {
					changes = append(changes, identity.NewGlobIdentity(fmt.Sprintf("shortcodes/%s*", pathInfo.BaseNameNoIdentifier())))
				} else {
					changes = append(changes, pathInfo)
				}
			} else {
				logger.Println("Template changed", pathInfo.Path())
				// Prefer the template's own identity when it is known.
				if templ, found := h.Tmpl().GetIdentity(templatePath); found {
					changes = append(changes, templ)
				} else {
					changes = append(changes, pathInfo)
				}
			}
		case files.ComponentFolderAssets:
			logger.Println("Asset changed", pathInfo.Path())

			// If the asset is in the resource cache, invalidate via its
			// cached identities; otherwise fall back to the path itself.
			var hasID bool
			r, _ := h.ResourceSpec.ResourceCache.Get(context.Background(), dynacache.CleanKey(pathInfo.Base()))
			identity.WalkIdentitiesShallow(r, func(level int, rid identity.Identity) bool {
				hasID = true
				changes = append(changes, rid)
				return false
			})
			if !hasID {
				changes = append(changes, pathInfo)
			}
		case files.ComponentFolderData:
			logger.Println("Data changed", pathInfo.Path())

			// This should cover all usage of site.Data.
			// Currently very coarse grained.
			changes = append(changes, siteidentities.Data)
			h.init.data.Reset()
		case files.ComponentFolderI18n:
			logger.Println("i18n changed", pathInfo.Path())
			i18nChanged = true
			// It's hard to determine the exact change set of this,
			// so be very coarse grained for now.
			changes = append(changes, identity.GenghisKhan)
		default:
			panic(fmt.Sprintf("unknown component: %q", pathInfo.Component()))
		}
	}

	changedPaths.deleted = removeDuplicatePaths(changedPaths.deleted)
	changedPaths.changedFiles = removeDuplicatePaths(changedPaths.changedFiles)

	h.Log.Trace(logg.StringFunc(func() string {
		var sb strings.Builder
		sb.WriteString("Resolved paths:\n")
		sb.WriteString("Deleted:\n")
		for _, p := range changedPaths.deleted {
			sb.WriteString("path: " + p.Path())
			sb.WriteString("\n")
		}
		sb.WriteString("Changed:\n")
		for _, p := range changedPaths.changedFiles {
			sb.WriteString("path: " + p.Path())
			sb.WriteString("\n")
		}
		return sb.String()
	}))

	for _, deletedDir := range deletedDirs {
		prefix := deletedDir + "/"
		predicate := func(id identity.Identity) bool {
			// This will effectively reset all pages below this dir.
			return strings.HasPrefix(paths.AddLeadingSlash(id.IdentifierBase()), prefix)
		}
		// Test in both directions.
		changes = append(changes, identity.NewPredicateIdentity(
			// Is dependent.
			predicate,
			// Is dependency.
			predicate,
		),
		)
	}

	if len(addedContentPaths) > 0 {
		// These content files are new and not in use anywhere.
		// To make sure that these gets listed in any site.RegularPages ranges or similar
		// we could invalidate everything, but first try to collect a sample set
		// from the surrounding pages.
		var surroundingIDs []identity.Identity
		for _, p := range addedContentPaths {
			if ids := h.pageTrees.collectIdentitiesSurrounding(p.Base(), 10); len(ids) > 0 {
				surroundingIDs = append(surroundingIDs, ids...)
			}
		}

		if len(surroundingIDs) > 0 {
			changes = append(changes, surroundingIDs...)
		} else {
			// No surrounding pages found, so invalidate everything.
			changes = append(changes, identity.GenghisKhan)
		}
	}

	// Deletes are handled before changes so replacements land cleanly.
	for _, deleted := range changedPaths.deleted {
		handleChange(deleted, true, false)
	}

	for _, id := range changedPaths.changedFiles {
		handleChange(id, false, false)
	}

	for _, id := range changedPaths.changedDirs {
		handleChange(id, false, true)
	}

	resourceFiles := h.fileEventsContentPaths(addedOrChangedContent)

	changed := &whatChanged{
		contentChanged: contentChanged,
		identitySet:    make(identity.Identities),
	}
	changed.Add(changes...)

	config.whatChanged = changed

	if err := init(config); err != nil {
		return err
	}

	// Combine all compiled cache busters into a single OR predicate.
	var cacheBusterOr func(string) bool
	if len(cacheBusters) > 0 {
		cacheBusterOr = func(s string) bool {
			for _, cb := range cacheBusters {
				if cb(s) {
					return true
				}
			}
			return false
		}
	}

	// Removes duplicates.
	changes = changed.identitySet.AsSlice()

	if err := h.resolveAndClearStateForIdentities(ctx, l, cacheBusterOr, changes); err != nil {
		return err
	}

	if tmplChanged || i18nChanged {
		// TODO(bep) we should split this, but currently the loading of i18n and layout files are tied together. See #12048.
		h.init.layouts.Reset()

		if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) {
			// TODO(bep) this could probably be optimized to somehow
			// only load the changed templates and its dependencies, but that is non-trivial.
			ll := l.WithField("substep", "rebuild templates")
			// The first site's Deps acts as the prototype for the rest.
			var prototype *deps.Deps
			for i, s := range h.Sites {
				if err := s.Deps.Compile(prototype); err != nil {
					return ll, err
				}
				if i == 0 {
					prototype = s.Deps
				}
			}
			return ll, nil
		}); err != nil {
			return err
		}
	}

	// Re-read and re-process the affected content files.
	if resourceFiles != nil {
		if err := h.processFiles(ctx, l, *config, resourceFiles...); err != nil {
			return err
		}
	}

	return nil
}

// processFull reads and processes all content sources for a full build.
func (h *HugoSites) processFull(ctx context.Context, l logg.LevelLogger, config BuildCfg) (err error) {
	if err = h.processFiles(ctx, l, config); err != nil {
		err = fmt.Errorf("readAndProcessContent: %w", err)
		return
	}
	return err
}

// processFiles collects and parses the given content files (or all content
// when no filenames are given) into Page objects.
func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildConfig BuildCfg, filenames ...pathChange) error {
	if s.Deps == nil {
		panic("nil deps on site")
	}

	sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs)

	// For inserts, we can pick an arbitrary pageMap.
	pageMap := s.Sites[0].pageMap

	c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, filenames)

	if err := c.Collect(); err != nil {
		return err
	}

	return nil
}