author    Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2023-12-24 19:11:05 +0100
committer Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2024-01-27 16:28:14 +0100
commit    7285e74090852b5d52f25e577850fa75f4aa8573 (patch)
tree      54d07cb4a7de2db5c89f2590266595f0aca6cbd6 /hugolib/content_map_page.go
parent    5fd1e7490305570872d3899f5edda950903c5213 (diff)
all: Rework page store, add a dynacache, improve partial rebuilds, and some general spring cleaning
There are some breaking changes in this commit, see #11455.

Closes #11455
Closes #11549

This fixes a set of bugs (see the issue list below) and also pays down some technical debt accumulated over the years. We now build with Staticcheck enabled in the CI build.

Performance should be about the same as before for regular-sized Hugo sites, but it should perform and scale much better on larger data sets, as objects that use lots of memory (e.g. rendered Markdown, big JSON files read into maps with transform.Unmarshal, etc.) will now be automatically garbage collected if needed. Performance on partial rebuilds when running the server in fast render mode should be the same, but change detection should be much more accurate.

A list of the notable new features:

* A new dependency tracker that covers (almost) all of Hugo's API and is used to do fine-grained partial rebuilds when running the server.
* A new and simpler tree document store which allows fast lookups and prefix-walking in all dimensions (e.g. language) concurrently.
* You can now configure an upper memory limit, allowing for much larger data sets and/or running on lower-spec PCs.

We have also lifted the "no resources in sub folders" restriction for branch bundles (e.g. sections).

Memory limit:

Hugo will, by default, set aside a quarter of the total system memory, but you can set this via the OS environment variable HUGO_MEMORYLIMIT (in gigabytes). This is backed by a partitioned LRU cache used throughout Hugo, a cache that gets dynamically resized in low-memory situations, allowing Go's garbage collector to free the memory.

New dependency tracker:

Hugo has had a rule-based, coarse-grained approach to server rebuilds that has worked mostly pretty well, but there have been some surprises (e.g. stale content). This is now revamped with a new dependency tracker that can quickly calculate the delta given a changed resource (e.g. a content file, template, JS file, etc.). This handles transitive relations, e.g. $page -> js.Build -> JS import, or $page1.Content -> render hook -> site.GetPage -> $page2.Title, or $page1.Content -> shortcode -> partial -> site.RegularPages -> $page2.Content -> shortcode ..., and should also handle changes to aggregated values (e.g. site.Lastmod) effectively.

This covers all of Hugo's API with two known exceptions (a list that may not be fully exhaustive):

* Changes to files loaded with the template func os.ReadFile may not be handled correctly. We recommend loading resources with resources.Get.
* Changes to Hugo objects (e.g. Page) passed in the template context to lang.Translate may not be detected correctly. We recommend having simple i18n templates without too much data context passed in, other than simple types such as strings and numbers.

Note that the cachebuster configuration (when A changes, then rebuild B) works well with the above, but we recommend that you revise that configuration, as in most situations it should no longer be needed. One example where it is still needed is with TailwindCSS, using changes to hugo_stats.json to trigger new CSS rebuilds.

Document store:

Previously, a little simplified, we split the document store (where we store pages and resources) into one tree per language. This worked pretty well, but the structure made some operations harder than they needed to be. We have now restructured it into one radix tree for all languages. Internally, the language is considered to be a dimension of that tree, and the tree can be viewed in all dimensions concurrently. This makes some language-related operations simpler (e.g. finding translations is just a slice range), but the idea is that it should also be relatively inexpensive to add more dimensions if needed (e.g. role).

Fixes #10169 Fixes #10364 Fixes #10482 Fixes #10630 Fixes #10656 Fixes #10694 Fixes #10918 Fixes #11262 Fixes #11439 Fixes #11453 Fixes #11457 Fixes #11466 Fixes #11540 Fixes #11551 Fixes #11556 Fixes #11654 Fixes #11661 Fixes #11663 Fixes #11664 Fixes #11669 Fixes #11671 Fixes #11807 Fixes #11808 Fixes #11809 Fixes #11815 Fixes #11840 Fixes #11853 Fixes #11860 Fixes #11883 Fixes #11904 Fixes #7388 Fixes #7425 Fixes #7436 Fixes #7544 Fixes #7882 Fixes #7960 Fixes #8255 Fixes #8307 Fixes #8863 Fixes #8927 Fixes #9192 Fixes #9324
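The "language as a dimension" idea is easiest to see in a toy model. The sketch below is illustrative only, with hypothetical names throughout; Hugo's real store lives in hugolib/doctree and is considerably more involved. It shows why keeping all language variants of a key in one slice makes finding translations a simple range scan:

package main

import "fmt"

// node stores one value per language index; a nil slot means
// "no variant exists in that language".
type node struct {
	variants []any // indexed by language dimension
}

// tree maps a shared key (e.g. "/blog/post") to the node holding
// all language variants of that document.
type tree struct {
	byKey map[string]*node
	lang  int // the dimension this view reads from
}

// Shape returns a view of the same underlying store fixed to one language.
func (t tree) Shape(lang int) tree { t.lang = lang; return t }

// Get returns the variant for this view's language.
func (t tree) Get(key string) any {
	if n := t.byKey[key]; n != nil {
		return n.variants[t.lang]
	}
	return nil
}

// Translations scans the other slots of the same slice.
func (t tree) Translations(key string) []any {
	var out []any
	if n := t.byKey[key]; n != nil {
		for i, v := range n.variants {
			if i != t.lang && v != nil {
				out = append(out, v)
			}
		}
	}
	return out
}

func main() {
	store := map[string]*node{"/blog/post": {variants: []any{"en page", "nn page"}}}
	en := tree{byKey: store}.Shape(0)
	fmt.Println(en.Get("/blog/post"), en.Translations("/blog/post"))
}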
Diffstat (limited to 'hugolib/content_map_page.go')
-rw-r--r--  hugolib/content_map_page.go | 2323
1 file changed, 1586 insertions(+), 737 deletions(-)
diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go
index 2c14ffa59..536f23ccd 100644
--- a/hugolib/content_map_page.go
+++ b/hugolib/content_map_page.go
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,1025 +16,1874 @@ package hugolib
import (
"context"
"fmt"
+ "io"
"path"
- "path/filepath"
+ "sort"
+ "strconv"
"strings"
-
"sync"
-
- "github.com/gohugoio/hugo/common/maps"
-
+ "sync/atomic"
+ "time"
+
+ "github.com/bep/logg"
+ "github.com/gohugoio/hugo/cache/dynacache"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/common/predicate"
+ "github.com/gohugoio/hugo/common/rungroup"
"github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/hugolib/doctree"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
- "github.com/gohugoio/hugo/common/hugio"
- "github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugofs/files"
- "github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
- "github.com/spf13/cast"
-
- "github.com/gohugoio/hugo/common/para"
)
-func newPageMaps(h *HugoSites) *pageMaps {
- mps := make([]*pageMap, len(h.Sites))
- for i, s := range h.Sites {
- mps[i] = s.pageMap
- }
- return &pageMaps{
- workers: para.New(h.numWorkers),
- pmaps: mps,
- }
+var pagePredicates = struct {
+ KindPage predicate.P[*pageState]
+ KindSection predicate.P[*pageState]
+ KindHome predicate.P[*pageState]
+ KindTerm predicate.P[*pageState]
+ ShouldListLocal predicate.P[*pageState]
+ ShouldListGlobal predicate.P[*pageState]
+ ShouldListAny predicate.P[*pageState]
+ ShouldLink predicate.P[page.Page]
+}{
+ KindPage: func(p *pageState) bool {
+ return p.Kind() == kinds.KindPage
+ },
+ KindSection: func(p *pageState) bool {
+ return p.Kind() == kinds.KindSection
+ },
+ KindHome: func(p *pageState) bool {
+ return p.Kind() == kinds.KindHome
+ },
+ KindTerm: func(p *pageState) bool {
+ return p.Kind() == kinds.KindTerm
+ },
+ ShouldListLocal: func(p *pageState) bool {
+ return p.m.shouldList(false)
+ },
+ ShouldListGlobal: func(p *pageState) bool {
+ return p.m.shouldList(true)
+ },
+ ShouldListAny: func(p *pageState) bool {
+ return p.m.shouldListAny()
+ },
+ ShouldLink: func(p page.Page) bool {
+ return !p.(*pageState).m.noLink()
+ },
}
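From the literals above, predicate.P[T] is just a boolean function of T, so these predicates compose with ordinary function wrapping. A hypothetical sketch (the and helper is not part of this commit):

// and combines two predicates; a hypothetical helper for illustration.
func and[T any](a, b predicate.P[T]) predicate.P[T] {
	return func(v T) bool { return a(v) && b(v) }
}

// listableSections would match sections that should appear in local page lists.
var listableSections = and(pagePredicates.KindSection, pagePredicates.ShouldListLocal)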
type pageMap struct {
+ i int
s *Site
- *contentMap
-}
-func (m *pageMap) Len() int {
- l := 0
- for _, t := range m.contentMap.pageTrees {
- l += t.Len()
- }
- return l
-}
+ // Main storage for all pages.
+ *pageTrees
-func (m *pageMap) createMissingTaxonomyNodes() error {
- if m.cfg.taxonomyDisabled {
- return nil
- }
- m.taxonomyEntries.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- vi := n.viewInfo
- k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)
+ // Used for simple page lookups by name, e.g. "mypage.md" or "mypage".
+ pageReverseIndex *contentTreeReverseIndex
- if _, found := m.taxonomies.Get(k); !found {
- vic := &contentBundleViewInfo{
- name: vi.name,
- termKey: vi.termKey,
- termOrigin: vi.termOrigin,
- }
- m.taxonomies.Insert(k, &contentNode{viewInfo: vic})
- }
- return false
- })
+ cachePages *dynacache.Partition[string, page.Pages]
+ cacheResources *dynacache.Partition[string, resource.Resources]
+ cacheContentRendered *dynacache.Partition[string, *resources.StaleValue[contentSummary]]
+ cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]]
+ contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]]
+ cacheContentSource *dynacache.Partition[string, *resources.StaleValue[[]byte]]
- return nil
+ cfg contentMapConfig
}
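These partitions are what the commit message's memory limit is built on: each is a slice of a shared, dynamically resized LRU, so evicted entries are simply recomputed on the next request. A minimal sketch of the get-or-create idiom, assuming only the field types shown above (expensiveListBuild is hypothetical):

// Fetch-or-compute through a partition; the create func runs only on a miss.
pages, err := m.cachePages.GetOrCreate("/blog/list", func(key string) (page.Pages, error) {
	return expensiveListBuild(key) // hypothetical builder
})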
-func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) {
- if n.fi == nil {
- panic("FileInfo must (currently) be set")
- }
+// pageTrees holds pages and resources in a tree structure for all sites/languages.
+// Each site gets its own tree set via the Shape method.
+type pageTrees struct {
+ // This tree contains all Pages.
+ // This includes regular pages, sections, taxonomies and so on.
+ // Note that all of these trees share the same key structure,
+ // so you can take a leaf Page key and do a prefix search
+ // with key + "/" to get all of its resources.
+ treePages *doctree.NodeShiftTree[contentNodeI]
- f, err := newFileInfo(m.s.SourceSpec, n.fi)
- if err != nil {
- return nil, err
- }
+ // This tree contains Resources bundled in pages.
+ treeResources *doctree.NodeShiftTree[contentNodeI]
- meta := n.fi.Meta()
- content := func() (hugio.ReadSeekCloser, error) {
- return meta.Open()
- }
+ // All pages and resources.
+ treePagesResources doctree.WalkableTrees[contentNodeI]
- bundled := owner != nil
- s := m.s
+ // This tree contains all taxonomy entries, e.g. "/tags/blue/page1".
+ treeTaxonomyEntries *doctree.TreeShiftTree[*weightedContentNode]
- sections := s.sectionsFromFile(f)
+ // A slice of the resource trees.
+ resourceTrees doctree.MutableTrees
+}
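The shared key structure described in the comments above means a page's resources can be found with a plain prefix walk. A self-contained toy illustration, using sorted iteration as a stand-in for the radix tree (this is not the doctree API):

package main

import (
	"fmt"
	"strings"
)

// Keys as they would appear across treePages / treeResources:
//   /blog/post            -> the page itself
//   /blog/post/cover.jpg  -> a bundled resource
//   /blog/post/data.json  -> another bundled resource
//
// resourcesFor returns everything stored under key + "/".
func resourcesFor(keys []string, key string) []string {
	prefix := key + "/"
	var out []string
	for _, k := range keys {
		if strings.HasPrefix(k, prefix) {
			out = append(out, k)
		}
	}
	return out
}

func main() {
	keys := []string{"/blog/post", "/blog/post/cover.jpg", "/blog/post/data.json", "/blog/other"}
	fmt.Println(resourcesFor(keys, "/blog/post")) // [/blog/post/cover.jpg /blog/post/data.json]
}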
- kind := s.kindFromFileInfoOrSections(f, sections)
- if kind == kinds.KindTerm {
- s.PathSpec.MakePathsSanitized(sections)
+// collectIdentities collects all identities in all trees matching the given key.
+// This will match in at most one tree, but may yield identities from multiple dimensions (e.g. language).
+func (t *pageTrees) collectIdentities(key string) []identity.Identity {
+ var ids []identity.Identity
+ if n := t.treePages.Get(key); n != nil {
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
}
-
- metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f}
-
- ps, err := newPageBase(metaProvider)
- if err != nil {
- return nil, err
+ if n := t.treeResources.Get(key); n != nil {
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
}
- if n.fi.Meta().IsRootFile {
- // Make sure that the bundle/section we start walking from is always
- // rendered.
- // This is only relevant in server fast render mode.
- ps.forceRender = true
- }
+ return ids
+}
- n.p = ps
- if ps.IsNode() {
- ps.bucket = newPageBucket(ps)
- }
+// collectIdentitiesSurrounding collects all identities surrounding the given key.
+func (t *pageTrees) collectIdentitiesSurrounding(key string, maxSamplesPerTree int) []identity.Identity {
+ // TODO1 test language coverage from this.
+ ids := t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treePages)
+ ids = append(ids, t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treeResources)...)
+ return ids
+}
- gi, err := s.h.gitInfoForPage(ps)
- if err != nil {
- return nil, fmt.Errorf("failed to load Git data: %w", err)
+func (t *pageTrees) collectIdentitiesSurroundingIn(key string, maxSamples int, tree *doctree.NodeShiftTree[contentNodeI]) []identity.Identity {
+ var ids []identity.Identity
+ section, ok := tree.LongestPrefixAll(path.Dir(key))
+ if ok {
+ count := 0
+ prefix := section + "/"
+ level := strings.Count(prefix, "/")
+ tree.WalkPrefixRaw(prefix, func(s string, n contentNodeI) bool {
+ if level != strings.Count(s, "/") {
+ return true
+ }
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
+ count++
+ return count > maxSamples
+ })
}
- ps.gitInfo = gi
- owners, err := s.h.codeownersForPage(ps)
- if err != nil {
- return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err)
- }
- ps.codeowners = owners
+ return ids
+}
- r, err := content()
- if err != nil {
- return nil, err
+func (t *pageTrees) DeletePageAndResourcesBelow(ss ...string) {
+ commit1 := t.resourceTrees.Lock(true)
+ defer commit1()
+ commit2 := t.treePages.Lock(true)
+ defer commit2()
+ for _, s := range ss {
+ t.resourceTrees.DeletePrefix(paths.AddTrailingSlash(s))
+ t.treePages.Delete(s)
}
- defer r.Close()
+}
- parseResult, err := pageparser.Parse(
- r,
- pageparser.Config{},
- )
- if err != nil {
- return nil, err
- }
+// Shape shapes all trees in t to the given dimension.
+func (t pageTrees) Shape(d, v int) *pageTrees {
+ t.treePages = t.treePages.Shape(d, v)
+ t.treeResources = t.treeResources.Shape(d, v)
+ t.treeTaxonomyEntries = t.treeTaxonomyEntries.Shape(d, v)
- ps.pageContent = pageContent{
- source: rawPageContent{
- parsed: parseResult,
- posMainContent: -1,
- posSummaryEnd: -1,
- posBodyStart: -1,
- },
- }
+ return &t
+}
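Note that Shape has a value receiver: t is a shallow copy, so reassigning its tree fields and returning &t yields a new per-site view without mutating the original set. The same pattern in miniature (hypothetical names):

type view struct{ dim int }

// withDim returns a copy fixed to the given dimension; the receiver is a
// value, so the caller's original view is unaffected.
func (v view) withDim(d int) *view { v.dim = d; return &v }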
- if err := ps.mapContent(parentBucket, metaProvider); err != nil {
- return nil, ps.wrapError(err)
- }
+var (
+ _ resource.Identifier = pageMapQueryPagesInSection{}
+ _ resource.Identifier = pageMapQueryPagesBelowPath{}
+)
- if err := metaProvider.applyDefaultValues(n); err != nil {
- return nil, err
- }
+type pageMapQueryPagesInSection struct {
+ pageMapQueryPagesBelowPath
- ps.init.Add(func(context.Context) (any, error) {
- pp, err := newPagePaths(s, ps, metaProvider)
- if err != nil {
- return nil, err
- }
+ Recursive bool
+ IncludeSelf bool
+}
- outputFormatsForPage := ps.m.outputFormats()
+func (q pageMapQueryPagesInSection) Key() string {
+ return "gagesInSection" + "/" + q.pageMapQueryPagesBelowPath.Key() + "/" + strconv.FormatBool(q.Recursive) + "/" + strconv.FormatBool(q.IncludeSelf)
+}
- // Prepare output formats for all sites.
- // We do this even if this page does not get rendered on
- // its own. It may be referenced via .Site.GetPage and
- // it will then need an output format.
- ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
- created := make(map[string]*pageOutput)
- shouldRenderPage := !ps.m.noRender()
+// This needs to be hashable.
+type pageMapQueryPagesBelowPath struct {
+ Path string
- for i, f := range ps.s.h.renderFormats {
- if po, found := created[f.Name]; found {
- ps.pageOutputs[i] = po
- continue
- }
+ // Additional identifier for this query.
+ // Used as part of the cache key.
+ KeyPart string
- render := shouldRenderPage
- if render {
- _, render = outputFormatsForPage.GetByName(f.Name)
- }
+ // Page inclusion filter.
+ // May be nil.
+ Include predicate.P[*pageState]
+}
- po := newPageOutput(ps, pp, f, render)
+func (q pageMapQueryPagesBelowPath) Key() string {
+ return q.Path + "/" + q.KeyPart
+}
- // Create a content provider for the first,
- // we may be able to reuse it.
- if i == 0 {
- contentProvider, err := newPageContentOutput(ps, po)
- if err != nil {
- return nil, err
+// Apply fn to all pages in m matching the given predicate.
+// fn may return true to stop the walk.
+func (m *pageMap) forEachPage(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error {
+ if include == nil {
+ include = func(p *pageState) bool {
+ return true
+ }
+ }
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treePages,
+ LockType: doctree.LockTypeRead,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if p, ok := n.(*pageState); ok && include(p) {
+ if terminate, err := fn(p); terminate || err != nil {
+ return terminate, err
}
- po.initContentProvider(contentProvider)
}
+ return false, nil
+ },
+ }
- ps.pageOutputs[i] = po
- created[f.Name] = po
+ return w.Walk(context.Background())
+}
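A hypothetical caller, for illustration only: counting regular pages without ever terminating the walk early.

var regular int
err := m.forEachPage(pagePredicates.KindPage, func(p *pageState) (bool, error) {
	regular++
	return false, nil // false: keep walking
})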
+func (m *pageMap) forEeachPageIncludingBundledPages(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error {
+ if include == nil {
+ include = func(p *pageState) bool {
+ return true
}
+ }
- if err := ps.initCommonProviders(pp); err != nil {
- return nil, err
- }
+ if err := m.forEachPage(include, fn); err != nil {
+ return err
+ }
- return nil, nil
- })
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treeResources,
+ LockType: doctree.LockTypeRead,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if rs, ok := n.(*resourceSource); ok {
+ if p, ok := rs.r.(*pageState); ok && include(p) {
+ if terminate, err := fn(p); terminate || err != nil {
+ return terminate, err
+ }
+ }
+ }
+ return false, nil
+ },
+ }
- ps.parent = owner
+ return w.Walk(context.Background())
+}
- return ps, nil
+func (m *pageMap) getOrCreatePagesFromCache(
+ key string, create func(string) (page.Pages, error),
+) (page.Pages, error) {
+ return m.cachePages.GetOrCreate(key, create)
}
-func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {
- if owner == nil {
- panic("owner is nil")
- }
- // TODO(bep) consolidate with multihost logic + clean up
- outputFormats := owner.m.outputFormats()
- seen := make(map[string]bool)
- var targetBasePaths []string
- // Make sure bundled resources are published to all of the output formats'
- // sub paths.
- for _, f := range outputFormats {
- p := f.Path
- if seen[p] {
- continue
- }
- seen[p] = true
- targetBasePaths = append(targetBasePaths, p)
+func (m *pageMap) getPagesInSection(q pageMapQueryPagesInSection) page.Pages {
+ cacheKey := q.Key()
- }
+ pages, err := m.getOrCreatePagesFromCache(cacheKey, func(string) (page.Pages, error) {
+ prefix := paths.AddTrailingSlash(q.Path)
- meta := fim.Meta()
- r := func() (hugio.ReadSeekCloser, error) {
- return meta.Open()
- }
+ var (
+ pas page.Pages
+ otherBranch string
+ )
- target := strings.TrimPrefix(meta.Path, owner.File().Dir())
+ include := q.Include
+ if include == nil {
+ include = pagePredicates.ShouldListLocal
+ }
- return owner.s.ResourceSpec.New(
- resources.ResourceSourceDescriptor{
- TargetPaths: owner.getTargetPaths,
- OpenReadSeekCloser: r,
- FileInfo: fim,
- RelTargetFilename: target,
- TargetBasePaths: targetBasePaths,
- LazyPublish: !owner.m.buildConfig.PublishResources,
- })
-}
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treePages,
+ Prefix: prefix,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if q.Recursive {
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
+ return false, nil
+ }
-func (m *pageMap) createSiteTaxonomies() error {
- m.s.taxonomies = make(page.TaxonomyList)
- var walkErr error
- m.taxonomies.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- t := n.viewInfo
+ // We store both leaves and branches in the same tree, so for non-recursive walks
+ // we need to walk until the end, but can skip
+ // anything below a child branch.
+ if otherBranch != "" && strings.HasPrefix(key, otherBranch) {
+ return false, nil
+ }
- viewName := t.name
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
- if t.termKey == "" {
- m.s.taxonomies[viewName.plural] = make(page.Taxonomy)
- } else {
- taxonomy := m.s.taxonomies[viewName.plural]
- if taxonomy == nil {
- walkErr = fmt.Errorf("missing taxonomy: %s", viewName.plural)
- return true
- }
- m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool {
- b2 := v.(*contentNode)
- info := b2.viewInfo
- taxonomy[info.termKey] = append(taxonomy[info.termKey], page.NewWeightedPage(info.weight, info.ref.p, n.p))
+ if n.isContentNodeBranch() {
+ otherBranch = key + "/"
+ }
- return false
- })
+ return false, nil
+ },
}
- return false
- })
+ err := w.Walk(context.Background())
- for _, taxonomy := range m.s.taxonomies {
- for _, v := range taxonomy {
- v.Sort()
+ if err == nil {
+ if q.IncludeSelf {
+ if n := m.treePages.Get(q.Path); n != nil {
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
+ }
+ }
+ page.SortByDefault(pas)
}
+
+ return pas, err
+ })
+ if err != nil {
+ panic(err)
}
- return walkErr
+ return pages
}
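The query structs double as cache keys: every field that affects the result is folded into Key(), so two identical queries share one cached page list, while the non-hashable Include predicate is presumably what KeyPart compensates for. Schematically, with hypothetical values:

q := pageMapQueryPagesInSection{
	pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{Path: "/blog", KeyPart: "/global"},
	Recursive:                  true,
}
// q.Key() -> "pagesInSection//blog//global/true/false"; any caller building
// the same query hits the same cachePages entry.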
-func (m *pageMap) createListAllPages() page.Pages {
- pages := make(page.Pages, 0)
+func (m *pageMap) getPagesWithTerm(q pageMapQueryPagesBelowPath) page.Pages {
+ key := q.Key()
- m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool {
- if n.p == nil {
- panic(fmt.Sprintf("BUG: page not set for %q", s))
+ v, err := m.cachePages.GetOrCreate(key, func(string) (page.Pages, error) {
+ var pas page.Pages
+ include := q.Include
+ if include == nil {
+ include = pagePredicates.ShouldListLocal
}
- if contentTreeNoListAlwaysFilter(s, n) {
- return false
+
+ err := m.treeTaxonomyEntries.WalkPrefix(
+ doctree.LockTypeNone,
+ paths.AddTrailingSlash(q.Path),
+ func(s string, n *weightedContentNode) (bool, error) {
+ p := n.n.(*pageState)
+ if !include(p) {
+ return false, nil
+ }
+ pas = append(pas, pageWithWeight0{n.weight, p})
+ return false, nil
+ },
+ )
+ if err != nil {
+ return nil, err
}
- pages = append(pages, n.p)
- return false
+
+ page.SortByDefault(pas)
+
+ return pas, nil
})
+ if err != nil {
+ panic(err)
+ }
- page.SortByDefault(pages)
- return pages
+ return v
}
-func (m *pageMap) assemblePages() error {
- m.taxonomyEntries.DeletePrefix("/")
+func (m *pageMap) getTermsForPageInTaxonomy(path, taxonomy string) page.Pages {
+ prefix := paths.AddLeadingSlash(taxonomy)
- if err := m.assembleSections(); err != nil {
- return err
- }
+ v, err := m.cachePages.GetOrCreate(prefix+path, func(string) (page.Pages, error) {
+ var pas page.Pages
- var err error
+ err := m.treeTaxonomyEntries.WalkPrefix(
+ doctree.LockTypeNone,
+ paths.AddTrailingSlash(prefix),
+ func(s string, n *weightedContentNode) (bool, error) {
+ if strings.HasSuffix(s, path) {
+ pas = append(pas, n.term)
+ }
+ return false, nil
+ },
+ )
+ if err != nil {
+ return nil, err
+ }
+ page.SortByDefault(pas)
+
+ return pas, nil
+ })
if err != nil {
- return err
+ panic(err)
}
- m.pages.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
+ return v
+}
- var shouldBuild bool
+func (m *pageMap) forEachResourceInPage(
+ ps *pageState,
+ lockType doctree.LockType,
+ exact bool,
+ handle func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error),
+) error {
+ keyPage := ps.Path()
+ if keyPage == "/" {
+ keyPage = ""
+ }
+ prefix := paths.AddTrailingSlash(ps.Path())
+ isBranch := ps.IsNode()
+
+ rw := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treeResources,
+ Prefix: prefix,
+ LockType: lockType,
+ Exact: exact,
+ }
- defer func() {
- // Make sure we always rebuild the view cache.
- if shouldBuild && err == nil && n.p != nil {
- m.attachPageToViews(s, n)
+ rw.Handle = func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if isBranch {
+ ownerKey, _ := m.treePages.LongestPrefixAll(resourceKey)
+ if ownerKey != keyPage {
+ // Stop walking downwards, someone else owns this resource.
+ rw.SkipPrefix(ownerKey + "/")
+ return false, nil
}
- }()
-
- if n.p != nil {
- // A rebuild
- shouldBuild = true
- return false
}
+ return handle(resourceKey, n, match)
+ }
- var parent *contentNode
- var parentBucket *pagesMapBucket
+ return rw.Walk(context.Background())
+}
- _, parent = m.getSection(s)
- if parent == nil {
- panic(fmt.Sprintf("BUG: parent not set for %q", s))
+func (m *pageMap) getResourcesForPage(ps *pageState) (resource.Resources, error) {
+ var res resource.Resources
+ m.forEachResourceInPage(ps, doctree.LockTypeNone, false, func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ rs := n.(*resourceSource)
+ if rs.r != nil {
+ res = append(res, rs.r)
}
- parentBucket = parent.p.bucket
+ return false, nil
+ })
+ return res, nil
+}
- n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
+func (m *pageMap) getOrCreateResourcesForPage(ps *pageState) resource.Resources {
+ keyPage := ps.Path()
+ if keyPage == "/" {
+ keyPage = ""
+ }
+ key := keyPage + "/get-resources-for-page"
+ v, err := m.cacheResources.GetOrCreate(key, func(string) (resource.Resources, error) {
+ res, err := m.getResourcesForPage(ps)
if err != nil {
- return true
- }
-
- shouldBuild = !(n.p.Kind() == kinds.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p)
- if !shouldBuild {
- m.deletePage(s)
- return false
+ return nil, err
}
- n.p.treeRef = &contentTreeRef{
- m: m,
- t: m.pages,
- n: n,
- key: s,
+ if translationKey := ps.m.translationKey; translationKey != "" {
+ // This should not be a very common case.
+ // Merge in resources from the other languages.
+ translatedPages, _ := m.s.h.translationKeyPages.Get(translationKey)
+ for _, tp := range translatedPages {
+ if tp == ps {
+ continue
+ }
+ tps := tp.(*pageState)
+ // Make sure we query from the correct language root.
+ res2, err := tps.s.pageMap.getResourcesForPage(tps)
+ if err != nil {
+ return nil, err
+ }
+ // Add it if its Name is not already in res.
+ for _, r := range res2 {
+ var found bool
+ for _, r2 := range res {
+ if r2.Name() == r.Name() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ res = append(res, r)
+ }
+ }
+ }
}
- if err = m.assembleResources(s, n.p, parentBucket); err != nil {
- return true
- }
+ lessFunc := func(i, j int) bool {
+ ri, rj := res[i], res[j]
+ if ri.ResourceType() < rj.ResourceType() {
+ return true
+ }
- return false
- })
+ p1, ok1 := ri.(page.Page)
+ p2, ok2 := rj.(page.Page)
- m.deleteOrphanSections()
+ if ok1 != ok2 {
+ // Pull pages behind other resources.
- return err
-}
+ return ok2
+ }
-func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error {
- var err error
-
- m.resources.WalkPrefix(s, func(s string, v any) bool {
- n := v.(*contentNode)
- meta := n.fi.Meta()
- classifier := meta.Classifier
- var r resource.Resource
- switch classifier {
- case files.ContentClassContent:
- var rp *pageState
- rp, err = m.newPageFromContentNode(n, parentBucket, p)
- if err != nil {
- return true
+ if ok1 {
+ return page.DefaultPageSort(p1, p2)
}
- rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.File().Path(), p.File().Dir()))
- r = rp
- case files.ContentClassFile:
- r, err = m.newResource(n.fi, p)
- if err != nil {
- return true
+ // Make sure not to use RelPermalink or any of the other methods that
+ // trigger lazy publishing.
+ return ri.Name() < rj.Name()
+ }
+
+ sort.SliceStable(res, lessFunc)
+
+ if len(ps.m.resourcesMetadata) > 0 {
+ for i, r := range res {
+ res[i] = resources.CloneWithMetadataIfNeeded(ps.m.resourcesMetadata, r)
}
- default:
- panic(fmt.Sprintf("invalid classifier: %q", classifier))
+ sort.SliceStable(res, lessFunc)
}
- p.resources = append(p.resources, r)
- return false
+ return res, nil
})
+ if err != nil {
+ panic(err)
+ }
- return err
+ return v
}
-func (m *pageMap) assembleSections() error {
- var sectionsToDelete []string
- var err error
+type weightedContentNode struct {
+ n contentNodeI
+ weight int
+ term *pageWithOrdinal
+}
- m.sections.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- var shouldBuild bool
+type buildStateReseter interface {
+ resetBuildState()
+}
- defer func() {
- // Make sure we always rebuild the view cache.
- if shouldBuild && err == nil && n.p != nil {
- m.attachPageToViews(s, n)
- if n.p.IsHome() {
- m.s.home = n.p
- }
- }
- }()
+type contentNodeI interface {
+ identity.IdentityProvider
+ identity.ForEeachIdentityProvider
+ Path() string
+ isContentNodeBranch() bool
+ buildStateReseter
+ resource.StaleMarker
+}
- sections := m.splitKey(s)
+var _ contentNodeI = (*contentNodeIs)(nil)
- if n.p != nil {
- if n.p.IsHome() {
- m.s.home = n.p
- }
- shouldBuild = true
- return false
- }
+type contentNodeIs []contentNodeI
- var parent *contentNode
- var parentBucket *pagesMapBucket
+func (n contentNodeIs) Path() string {