summaryrefslogtreecommitdiffstats
path: root/hugolib/content_map_page.go
diff options
context:
space:
mode:
Diffstat (limited to 'hugolib/content_map_page.go')
-rw-r--r--hugolib/content_map_page.go2323
1 files changed, 1586 insertions, 737 deletions
diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go
index 2c14ffa59..536f23ccd 100644
--- a/hugolib/content_map_page.go
+++ b/hugolib/content_map_page.go
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -16,1025 +16,1874 @@ package hugolib
import (
"context"
"fmt"
+ "io"
"path"
- "path/filepath"
+ "sort"
+ "strconv"
"strings"
-
"sync"
-
- "github.com/gohugoio/hugo/common/maps"
-
+ "sync/atomic"
+ "time"
+
+ "github.com/bep/logg"
+ "github.com/gohugoio/hugo/cache/dynacache"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/common/predicate"
+ "github.com/gohugoio/hugo/common/rungroup"
"github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/hugolib/doctree"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
- "github.com/gohugoio/hugo/common/hugio"
- "github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugofs/files"
- "github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
- "github.com/spf13/cast"
-
- "github.com/gohugoio/hugo/common/para"
)
-func newPageMaps(h *HugoSites) *pageMaps {
- mps := make([]*pageMap, len(h.Sites))
- for i, s := range h.Sites {
- mps[i] = s.pageMap
- }
- return &pageMaps{
- workers: para.New(h.numWorkers),
- pmaps: mps,
- }
+var pagePredicates = struct {
+ KindPage predicate.P[*pageState]
+ KindSection predicate.P[*pageState]
+ KindHome predicate.P[*pageState]
+ KindTerm predicate.P[*pageState]
+ ShouldListLocal predicate.P[*pageState]
+ ShouldListGlobal predicate.P[*pageState]
+ ShouldListAny predicate.P[*pageState]
+ ShouldLink predicate.P[page.Page]
+}{
+ KindPage: func(p *pageState) bool {
+ return p.Kind() == kinds.KindPage
+ },
+ KindSection: func(p *pageState) bool {
+ return p.Kind() == kinds.KindSection
+ },
+ KindHome: func(p *pageState) bool {
+ return p.Kind() == kinds.KindHome
+ },
+ KindTerm: func(p *pageState) bool {
+ return p.Kind() == kinds.KindTerm
+ },
+ ShouldListLocal: func(p *pageState) bool {
+ return p.m.shouldList(false)
+ },
+ ShouldListGlobal: func(p *pageState) bool {
+ return p.m.shouldList(true)
+ },
+ ShouldListAny: func(p *pageState) bool {
+ return p.m.shouldListAny()
+ },
+ ShouldLink: func(p page.Page) bool {
+ return !p.(*pageState).m.noLink()
+ },
}
type pageMap struct {
+ i int
s *Site
- *contentMap
-}
-func (m *pageMap) Len() int {
- l := 0
- for _, t := range m.contentMap.pageTrees {
- l += t.Len()
- }
- return l
-}
+ // Main storage for all pages.
+ *pageTrees
-func (m *pageMap) createMissingTaxonomyNodes() error {
- if m.cfg.taxonomyDisabled {
- return nil
- }
- m.taxonomyEntries.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- vi := n.viewInfo
- k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)
+ // Used for simple page lookups by name, e.g. "mypage.md" or "mypage".
+ pageReverseIndex *contentTreeReverseIndex
- if _, found := m.taxonomies.Get(k); !found {
- vic := &contentBundleViewInfo{
- name: vi.name,
- termKey: vi.termKey,
- termOrigin: vi.termOrigin,
- }
- m.taxonomies.Insert(k, &contentNode{viewInfo: vic})
- }
- return false
- })
+ cachePages *dynacache.Partition[string, page.Pages]
+ cacheResources *dynacache.Partition[string, resource.Resources]
+ cacheContentRendered *dynacache.Partition[string, *resources.StaleValue[contentSummary]]
+ cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]]
+ contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]]
+ cacheContentSource *dynacache.Partition[string, *resources.StaleValue[[]byte]]
- return nil
+ cfg contentMapConfig
}
-func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) {
- if n.fi == nil {
- panic("FileInfo must (currently) be set")
- }
+// pageTrees holds pages and resources in a tree structure for all sites/languages.
+// Each site gets its own tree set via the Shape method.
+type pageTrees struct {
+ // This tree contains all Pages.
+ // This includes regular pages, sections, taxonomies and so on.
+ // Note that all of these trees share the same key structure,
+ // so you can take a leaf Page key and do a prefix search
+ // with key + "/" to get all of its resources.
+ treePages *doctree.NodeShiftTree[contentNodeI]
- f, err := newFileInfo(m.s.SourceSpec, n.fi)
- if err != nil {
- return nil, err
- }
+ // This tree contains Resources bundled in pages.
+ treeResources *doctree.NodeShiftTree[contentNodeI]
- meta := n.fi.Meta()
- content := func() (hugio.ReadSeekCloser, error) {
- return meta.Open()
- }
+ // All pages and resources.
+ treePagesResources doctree.WalkableTrees[contentNodeI]
- bundled := owner != nil
- s := m.s
+ // This tree contains all taxonomy entries, e.g. "/tags/blue/page1"
+ treeTaxonomyEntries *doctree.TreeShiftTree[*weightedContentNode]
- sections := s.sectionsFromFile(f)
+ // A slice of the resource trees.
+ resourceTrees doctree.MutableTrees
+}
- kind := s.kindFromFileInfoOrSections(f, sections)
- if kind == kinds.KindTerm {
- s.PathSpec.MakePathsSanitized(sections)
+// collectIdentities collects all identities from in all trees matching the given key.
+// This will at most match in one tree, but may give identities from multiple dimensions (e.g. language).
+func (t *pageTrees) collectIdentities(key string) []identity.Identity {
+ var ids []identity.Identity
+ if n := t.treePages.Get(key); n != nil {
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
}
-
- metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f}
-
- ps, err := newPageBase(metaProvider)
- if err != nil {
- return nil, err
+ if n := t.treeResources.Get(key); n != nil {
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
}
- if n.fi.Meta().IsRootFile {
- // Make sure that the bundle/section we start walking from is always
- // rendered.
- // This is only relevant in server fast render mode.
- ps.forceRender = true
- }
+ return ids
+}
- n.p = ps
- if ps.IsNode() {
- ps.bucket = newPageBucket(ps)
- }
+// collectIdentitiesSurrounding collects all identities surrounding the given key.
+func (t *pageTrees) collectIdentitiesSurrounding(key string, maxSamplesPerTree int) []identity.Identity {
+ // TODO1 test language coverage from this.
+ ids := t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treePages)
+ ids = append(ids, t.collectIdentitiesSurroundingIn(key, maxSamplesPerTree, t.treeResources)...)
+ return ids
+}
- gi, err := s.h.gitInfoForPage(ps)
- if err != nil {
- return nil, fmt.Errorf("failed to load Git data: %w", err)
+func (t *pageTrees) collectIdentitiesSurroundingIn(key string, maxSamples int, tree *doctree.NodeShiftTree[contentNodeI]) []identity.Identity {
+ var ids []identity.Identity
+ section, ok := tree.LongestPrefixAll(path.Dir(key))
+ if ok {
+ count := 0
+ prefix := section + "/"
+ level := strings.Count(prefix, "/")
+ tree.WalkPrefixRaw(prefix, func(s string, n contentNodeI) bool {
+ if level != strings.Count(s, "/") {
+ return true
+ }
+ n.ForEeachIdentity(func(id identity.Identity) bool {
+ ids = append(ids, id)
+ return false
+ })
+ count++
+ return count > maxSamples
+ })
}
- ps.gitInfo = gi
- owners, err := s.h.codeownersForPage(ps)
- if err != nil {
- return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err)
- }
- ps.codeowners = owners
+ return ids
+}
- r, err := content()
- if err != nil {
- return nil, err
+func (t *pageTrees) DeletePageAndResourcesBelow(ss ...string) {
+ commit1 := t.resourceTrees.Lock(true)
+ defer commit1()
+ commit2 := t.treePages.Lock(true)
+ defer commit2()
+ for _, s := range ss {
+ t.resourceTrees.DeletePrefix(paths.AddTrailingSlash(s))
+ t.treePages.Delete(s)
}
- defer r.Close()
+}
- parseResult, err := pageparser.Parse(
- r,
- pageparser.Config{},
- )
- if err != nil {
- return nil, err
- }
+// Shape shapes all trees in t to the given dimension.
+func (t pageTrees) Shape(d, v int) *pageTrees {
+ t.treePages = t.treePages.Shape(d, v)
+ t.treeResources = t.treeResources.Shape(d, v)
+ t.treeTaxonomyEntries = t.treeTaxonomyEntries.Shape(d, v)
- ps.pageContent = pageContent{
- source: rawPageContent{
- parsed: parseResult,
- posMainContent: -1,
- posSummaryEnd: -1,
- posBodyStart: -1,
- },
- }
+ return &t
+}
- if err := ps.mapContent(parentBucket, metaProvider); err != nil {
- return nil, ps.wrapError(err)
- }
+var (
+ _ resource.Identifier = pageMapQueryPagesInSection{}
+ _ resource.Identifier = pageMapQueryPagesBelowPath{}
+)
- if err := metaProvider.applyDefaultValues(n); err != nil {
- return nil, err
- }
+type pageMapQueryPagesInSection struct {
+ pageMapQueryPagesBelowPath
- ps.init.Add(func(context.Context) (any, error) {
- pp, err := newPagePaths(s, ps, metaProvider)
- if err != nil {
- return nil, err
- }
+ Recursive bool
+ IncludeSelf bool
+}
- outputFormatsForPage := ps.m.outputFormats()
+func (q pageMapQueryPagesInSection) Key() string {
+ return "gagesInSection" + "/" + q.pageMapQueryPagesBelowPath.Key() + "/" + strconv.FormatBool(q.Recursive) + "/" + strconv.FormatBool(q.IncludeSelf)
+}
- // Prepare output formats for all sites.
- // We do this even if this page does not get rendered on
- // its own. It may be referenced via .Site.GetPage and
- // it will then need an output format.
- ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
- created := make(map[string]*pageOutput)
- shouldRenderPage := !ps.m.noRender()
+// This needs to be hashable.
+type pageMapQueryPagesBelowPath struct {
+ Path string
- for i, f := range ps.s.h.renderFormats {
- if po, found := created[f.Name]; found {
- ps.pageOutputs[i] = po
- continue
- }
+ // Additional identifier for this query.
+ // Used as part of the cache key.
+ KeyPart string
- render := shouldRenderPage
- if render {
- _, render = outputFormatsForPage.GetByName(f.Name)
- }
+ // Page inclusion filter.
+ // May be nil.
+ Include predicate.P[*pageState]
+}
- po := newPageOutput(ps, pp, f, render)
+func (q pageMapQueryPagesBelowPath) Key() string {
+ return q.Path + "/" + q.KeyPart
+}
- // Create a content provider for the first,
- // we may be able to reuse it.
- if i == 0 {
- contentProvider, err := newPageContentOutput(ps, po)
- if err != nil {
- return nil, err
+// Apply fn to all pages in m matching the given predicate.
+// fn may return true to stop the walk.
+func (m *pageMap) forEachPage(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error {
+ if include == nil {
+ include = func(p *pageState) bool {
+ return true
+ }
+ }
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treePages,
+ LockType: doctree.LockTypeRead,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if p, ok := n.(*pageState); ok && include(p) {
+ if terminate, err := fn(p); terminate || err != nil {
+ return terminate, err
}
- po.initContentProvider(contentProvider)
}
+ return false, nil
+ },
+ }
- ps.pageOutputs[i] = po
- created[f.Name] = po
+ return w.Walk(context.Background())
+}
+func (m *pageMap) forEeachPageIncludingBundledPages(include predicate.P[*pageState], fn func(p *pageState) (bool, error)) error {
+ if include == nil {
+ include = func(p *pageState) bool {
+ return true
}
+ }
- if err := ps.initCommonProviders(pp); err != nil {
- return nil, err
- }
+ if err := m.forEachPage(include, fn); err != nil {
+ return err
+ }
- return nil, nil
- })
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treeResources,
+ LockType: doctree.LockTypeRead,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if rs, ok := n.(*resourceSource); ok {
+ if p, ok := rs.r.(*pageState); ok && include(p) {
+ if terminate, err := fn(p); terminate || err != nil {
+ return terminate, err
+ }
+ }
+ }
+ return false, nil
+ },
+ }
- ps.parent = owner
+ return w.Walk(context.Background())
+}
- return ps, nil
+func (m *pageMap) getOrCreatePagesFromCache(
+ key string, create func(string) (page.Pages, error),
+) (page.Pages, error) {
+ return m.cachePages.GetOrCreate(key, create)
}
-func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {
- if owner == nil {
- panic("owner is nil")
- }
- // TODO(bep) consolidate with multihost logic + clean up
- outputFormats := owner.m.outputFormats()
- seen := make(map[string]bool)
- var targetBasePaths []string
- // Make sure bundled resources are published to all of the output formats'
- // sub paths.
- for _, f := range outputFormats {
- p := f.Path
- if seen[p] {
- continue
- }
- seen[p] = true
- targetBasePaths = append(targetBasePaths, p)
+func (m *pageMap) getPagesInSection(q pageMapQueryPagesInSection) page.Pages {
+ cacheKey := q.Key()
- }
+ pages, err := m.getOrCreatePagesFromCache(cacheKey, func(string) (page.Pages, error) {
+ prefix := paths.AddTrailingSlash(q.Path)
- meta := fim.Meta()
- r := func() (hugio.ReadSeekCloser, error) {
- return meta.Open()
- }
+ var (
+ pas page.Pages
+ otherBranch string
+ )
- target := strings.TrimPrefix(meta.Path, owner.File().Dir())
+ include := q.Include
+ if include == nil {
+ include = pagePredicates.ShouldListLocal
+ }
- return owner.s.ResourceSpec.New(
- resources.ResourceSourceDescriptor{
- TargetPaths: owner.getTargetPaths,
- OpenReadSeekCloser: r,
- FileInfo: fim,
- RelTargetFilename: target,
- TargetBasePaths: targetBasePaths,
- LazyPublish: !owner.m.buildConfig.PublishResources,
- })
-}
+ w := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treePages,
+ Prefix: prefix,
+ Handle: func(key string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if q.Recursive {
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
+ return false, nil
+ }
-func (m *pageMap) createSiteTaxonomies() error {
- m.s.taxonomies = make(page.TaxonomyList)
- var walkErr error
- m.taxonomies.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- t := n.viewInfo
+ // We store both leaves and branches in the same tree, so for non-recursive walks,
+ // we need to walk until the end, but can skip
+ // any not belonging to child branches.
+ if otherBranch != "" && strings.HasPrefix(key, otherBranch) {
+ return false, nil
+ }
- viewName := t.name
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
- if t.termKey == "" {
- m.s.taxonomies[viewName.plural] = make(page.Taxonomy)
- } else {
- taxonomy := m.s.taxonomies[viewName.plural]
- if taxonomy == nil {
- walkErr = fmt.Errorf("missing taxonomy: %s", viewName.plural)
- return true
- }
- m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool {
- b2 := v.(*contentNode)
- info := b2.viewInfo
- taxonomy[info.termKey] = append(taxonomy[info.termKey], page.NewWeightedPage(info.weight, info.ref.p, n.p))
+ if n.isContentNodeBranch() {
+ otherBranch = key + "/"
+ }
- return false
- })
+ return false, nil
+ },
}
- return false
- })
+ err := w.Walk(context.Background())
- for _, taxonomy := range m.s.taxonomies {
- for _, v := range taxonomy {
- v.Sort()
+ if err == nil {
+ if q.IncludeSelf {
+ if n := m.treePages.Get(q.Path); n != nil {
+ if p, ok := n.(*pageState); ok && include(p) {
+ pas = append(pas, p)
+ }
+ }
+ }
+ page.SortByDefault(pas)
}
+
+ return pas, err
+ })
+ if err != nil {
+ panic(err)
}
- return walkErr
+ return pages
}
-func (m *pageMap) createListAllPages() page.Pages {
- pages := make(page.Pages, 0)
+func (m *pageMap) getPagesWithTerm(q pageMapQueryPagesBelowPath) page.Pages {
+ key := q.Key()
- m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool {
- if n.p == nil {
- panic(fmt.Sprintf("BUG: page not set for %q", s))
+ v, err := m.cachePages.GetOrCreate(key, func(string) (page.Pages, error) {
+ var pas page.Pages
+ include := q.Include
+ if include == nil {
+ include = pagePredicates.ShouldListLocal
}
- if contentTreeNoListAlwaysFilter(s, n) {
- return false
+
+ err := m.treeTaxonomyEntries.WalkPrefix(
+ doctree.LockTypeNone,
+ paths.AddTrailingSlash(q.Path),
+ func(s string, n *weightedContentNode) (bool, error) {
+ p := n.n.(*pageState)
+ if !include(p) {
+ return false, nil
+ }
+ pas = append(pas, pageWithWeight0{n.weight, p})
+ return false, nil
+ },
+ )
+ if err != nil {
+ return nil, err
}
- pages = append(pages, n.p)
- return false
+
+ page.SortByDefault(pas)
+
+ return pas, nil
})
+ if err != nil {
+ panic(err)
+ }
- page.SortByDefault(pages)
- return pages
+ return v
}
-func (m *pageMap) assemblePages() error {
- m.taxonomyEntries.DeletePrefix("/")
+func (m *pageMap) getTermsForPageInTaxonomy(path, taxonomy string) page.Pages {
+ prefix := paths.AddLeadingSlash(taxonomy)
- if err := m.assembleSections(); err != nil {
- return err
- }
+ v, err := m.cachePages.GetOrCreate(prefix+path, func(string) (page.Pages, error) {
+ var pas page.Pages
- var err error
+ err := m.treeTaxonomyEntries.WalkPrefix(
+ doctree.LockTypeNone,
+ paths.AddTrailingSlash(prefix),
+ func(s string, n *weightedContentNode) (bool, error) {
+ if strings.HasSuffix(s, path) {
+ pas = append(pas, n.term)
+ }
+ return false, nil
+ },
+ )
+ if err != nil {
+ return nil, err
+ }
+ page.SortByDefault(pas)
+
+ return pas, nil
+ })
if err != nil {
- return err
+ panic(err)
}
- m.pages.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
+ return v
+}
- var shouldBuild bool
+func (m *pageMap) forEachResourceInPage(
+ ps *pageState,
+ lockType doctree.LockType,
+ exact bool,
+ handle func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error),
+) error {
+ keyPage := ps.Path()
+ if keyPage == "/" {
+ keyPage = ""
+ }
+ prefix := paths.AddTrailingSlash(ps.Path())
+ isBranch := ps.IsNode()
+
+ rw := &doctree.NodeShiftTreeWalker[contentNodeI]{
+ Tree: m.treeResources,
+ Prefix: prefix,
+ LockType: lockType,
+ Exact: exact,
+ }
- defer func() {
- // Make sure we always rebuild the view cache.
- if shouldBuild && err == nil && n.p != nil {
- m.attachPageToViews(s, n)
+ rw.Handle = func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ if isBranch {
+ ownerKey, _ := m.treePages.LongestPrefixAll(resourceKey)
+ if ownerKey != keyPage {
+ // Stop walking downwards, someone else owns this resource.
+ rw.SkipPrefix(ownerKey + "/")
+ return false, nil
}
- }()
-
- if n.p != nil {
- // A rebuild
- shouldBuild = true
- return false
}
+ return handle(resourceKey, n, match)
+ }
- var parent *contentNode
- var parentBucket *pagesMapBucket
+ return rw.Walk(context.Background())
+}
- _, parent = m.getSection(s)
- if parent == nil {
- panic(fmt.Sprintf("BUG: parent not set for %q", s))
+func (m *pageMap) getResourcesForPage(ps *pageState) (resource.Resources, error) {
+ var res resource.Resources
+ m.forEachResourceInPage(ps, doctree.LockTypeNone, false, func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
+ rs := n.(*resourceSource)
+ if rs.r != nil {
+ res = append(res, rs.r)
}
- parentBucket = parent.p.bucket
+ return false, nil
+ })
+ return res, nil
+}
- n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
+func (m *pageMap) getOrCreateResourcesForPage(ps *pageState) resource.Resources {
+ keyPage := ps.Path()
+ if keyPage == "/" {
+ keyPage = ""
+ }
+ key := keyPage + "/get-resources-for-page"
+ v, err := m.cacheResources.GetOrCreate(key, func(string) (resource.Resources, error) {
+ res, err := m.getResourcesForPage(ps)
if err != nil {
- return true
- }
-
- shouldBuild = !(n.p.Kind() == kinds.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p)
- if !shouldBuild {
- m.deletePage(s)
- return false
+ return nil, err
}
- n.p.treeRef = &contentTreeRef{
- m: m,
- t: m.pages,
- n: n,
- key: s,
+ if translationKey := ps.m.translationKey; translationKey != "" {
+ // This should not be a very common case.
+ // Merge in resources from the other languages.
+ translatedPages, _ := m.s.h.translationKeyPages.Get(translationKey)
+ for _, tp := range translatedPages {
+ if tp == ps {
+ continue
+ }
+ tps := tp.(*pageState)
+ // Make sure we query from the correct language root.
+ res2, err := tps.s.pageMap.getResourcesForPage(tps)
+ if err != nil {
+ return nil, err
+ }
+ // Add if Name not already in res.
+ for _, r := range res2 {
+ var found bool
+ for _, r2 := range res {
+ if r2.Name() == r.Name() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ res = append(res, r)
+ }
+ }
+ }
}
- if err = m.assembleResources(s, n.p, parentBucket); err != nil {
- return true
- }
+ lessFunc := func(i, j int) bool {
+ ri, rj := res[i], res[j]
+ if ri.ResourceType() < rj.ResourceType() {
+ return true
+ }
- return false
- })
+ p1, ok1 := ri.(page.Page)
+ p2, ok2 := rj.(page.Page)
- m.deleteOrphanSections()
+ if ok1 != ok2 {
+ // Pull pages behind other resources.
- return err
-}
+ return ok2
+ }
-func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error {
- var err error
-
- m.resources.WalkPrefix(s, func(s string, v any) bool {
- n := v.(*contentNode)
- meta := n.fi.Meta()
- classifier := meta.Classifier
- var r resource.Resource
- switch classifier {
- case files.ContentClassContent:
- var rp *pageState
- rp, err = m.newPageFromContentNode(n, parentBucket, p)
- if err != nil {
- return true
+ if ok1 {
+ return page.DefaultPageSort(p1, p2)
}
- rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.File().Path(), p.File().Dir()))
- r = rp
- case files.ContentClassFile:
- r, err = m.newResource(n.fi, p)
- if err != nil {
- return true
+ // Make sure not to use RelPermalink or any of the other methods that
+ // trigger lazy publishing.
+ return ri.Name() < rj.Name()
+ }
+
+ sort.SliceStable(res, lessFunc)
+
+ if len(ps.m.resourcesMetadata) > 0 {
+ for i, r := range res {
+ res[i] = resources.CloneWithMetadataIfNeeded(ps.m.resourcesMetadata, r)
}
- default:
- panic(fmt.Sprintf("invalid classifier: %q", classifier))
+ sort.SliceStable(res, lessFunc)
}
- p.resources = append(p.resources, r)
- return false
+ return res, nil
})
+ if err != nil {
+ panic(err)
+ }
- return err
+ return v
}
-func (m *pageMap) assembleSections() error {
- var sectionsToDelete []string
- var err error
+type weightedContentNode struct {
+ n contentNodeI
+ weight int
+ term *pageWithOrdinal
+}
- m.sections.Walk(func(s string, v any) bool {
- n := v.(*contentNode)
- var shouldBuild bool
+type buildStateReseter interface {
+ resetBuildState()
+}
- defer func() {
- // Make sure we always rebuild the view cache.
- if shouldBuild && err == nil && n.p != nil {
- m.attachPageToViews(s, n)
- if n.p.IsHome() {
- m.s.home = n.p
- }
- }
- }()
+type contentNodeI interface {
+ identity.IdentityProvider
+ identity.ForEeachIdentityProvider
+ Path() string
+ isContentNodeBranch() bool
+ buildStateReseter
+ resource.StaleMarker
+}
- sections := m.splitKey(s)
+var _ contentNodeI = (*contentNodeIs)(nil)
- if n.p != nil {
- if n.p.IsHome() {
- m.s.home = n.p
- }
- shouldBuild = true
- return false
- }
+type contentNodeIs []contentNodeI
- var parent *contentNode
- var parentBucket *pagesMapBucket
+func (n contentNodeIs) Path() string {
+ return n[0].Path()
+}
- if s != "/" {
- _, parent = m.getSection(s)
- if parent == nil || parent.p == nil {
- panic(fmt.Sprintf("BUG: parent not set for %q", s))
- }
+func (n contentNodeIs) isContentNodeBranch() bool {
+ return n[0].isContentNodeBranch()
+}
+
+func (n contentNodeIs) GetIdentity() identity.Identity {
+ return n[0].GetIdentity()
+}
+
+func (n contentNodeIs) ForEeachIdentity(f func(identity.Identity) bool) {
+ for _, nn := range n {
+ if nn != nil {
+ nn.ForEeachIdentity(f)
}
+ }
+}
- if parent != nil {
- parentBucket = parent.p.bucket
- } else if s == "/" {
- parentBucket = m.s.siteBucket
+func (n contentNodeIs) resetBuildState() {
+ for _, nn := range n {
+ if nn != nil {
+ nn.resetBuildState()
}
+ }
+}
- kind := kinds.KindSection
- if s == "/" {
- kind = kinds.KindHome
+func (n contentNodeIs) MarkStale() {
+ for _, nn := range n {
+ if nn != nil {
+ nn.MarkStale()
}
+ }
+}
- if n.fi != nil {
- n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
- if err != nil {
- return true
+type contentNodeShifter struct {
+ numLanguages int
+}
+
+func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension) (bool, bool) {
+ lidx := dimension[0]
+ switch v := n.(type) {
+ case contentNodeIs:
+ resource.MarkStale(v[lidx])
+ wasDeleted := v[lidx] != nil
+ v[lidx] = nil
+ isEmpty := true
+ for _, vv := range v {
+ if vv != nil {
+ isEmpty = false
+ break
}
- } else {
- n.p = m.s.newPage(n, parentBucket, kind, "", sections...)
}
-
- shouldBuild = m.s.shouldBuild(n.p)
- if !shouldBuild {
- sectionsToDelete = append(sectionsToDelete, s)
- return false
+ return wasDeleted, isEmpty
+ case resourceSources:
+ resource.MarkStale(v[lidx])
+ wasDeleted := v[lidx] != nil
+ v[lidx] = nil
+ isEmpty := true
+ for _, vv := range v {
+ if vv != nil {
+ isEmpty = false
+ break
+ }
}
+ return wasDeleted, isEmpty
+ case *resourceSource:
+ resource.MarkStale(v)
+ return true, true
+ case *pageState:
+ resource.MarkStale(v)
+ return true, true
+ default:
+ panic(fmt.Sprintf("unknown type %T", n))
+ }
+}
- n.p.treeRef = &contentTreeRef{
- m: m,
- t: m.sections,
- n: n,
- key: s,
+func (s *contentNodeShifter) Shift(n contentNodeI, dimension doctree.Dimension, exact bool) (contentNodeI, bool, doctree.DimensionFlag) {
+ lidx := dimension[0]
+ // How accurate is the match.
+ accuracy := doctree.DimensionLanguage
+ switch v := n.(type) {
+ case contentNodeIs:
+ if len(v) == 0 {
+ panic("empty contentNodeIs")
}
-
- if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil {
- return true