diff options
author | Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com> | 2019-09-10 11:26:34 +0200 |
---|---|---|
committer | Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com> | 2020-02-18 09:49:42 +0100 |
commit | eada236f87d9669885da1ff647672bb3dc6b4954 (patch) | |
tree | a0303f067b2cbe06c55637013dbd7702a551c64f /hugolib/content_map_page.go | |
parent | e5329f13c02b87f0c30f8837759c810cd90ff8da (diff) |
Introduce a tree map for all content
This commit introduces a new data structure to store pages and their resources.
This data structure is backed by radix trees.
This simplifies tree operations, makes all pages a bundle, and paves the way for #6310.
It also solves a set of annoying issues (see list below).
Not a motivation behind this, but this commit also makes Hugo in general a little bit faster and more memory-efficient (see benchmarks). Especially for partial rebuilds on content edits, but also when taxonomies are in use.
```
name old time/op new time/op delta
SiteNew/Bundle_with_image/Edit-16 1.32ms ± 8% 1.00ms ± 9% -24.42% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16 1.28ms ± 0% 0.94ms ± 0% -26.26% (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16 33.9ms ± 2% 21.8ms ± 1% -35.67% (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16 40.6ms ± 1% 37.7ms ± 3% -7.20% (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16 56.7ms ± 0% 51.7ms ± 1% -8.82% (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16 19.9ms ± 2% 18.3ms ± 3% -7.64% (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16 37.9ms ± 4% 34.0ms ± 2% -10.28% (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16 10.7ms ± 0% 10.6ms ± 0% -1.15% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16 10.8ms ± 0% 10.7ms ± 0% -1.05% (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16 43.2ms ± 1% 39.6ms ± 1% -8.35% (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16 47.6ms ± 1% 47.3ms ± 0% ~ (p=0.057 n=4+4)
SiteNew/Deep_content_tree-16 73.0ms ± 1% 74.2ms ± 1% ~ (p=0.114 n=4+4)
SiteNew/Many_HTML_templates-16 37.9ms ± 0% 38.1ms ± 1% ~ (p=0.114 n=4+4)
SiteNew/Page_collections-16 53.6ms ± 1% 54.7ms ± 1% +2.09% (p=0.029 n=4+4)
name old alloc/op new alloc/op delta
SiteNew/Bundle_with_image/Edit-16 486kB ± 0% 430kB ± 0% -11.47% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16 265kB ± 0% 209kB ± 0% -21.06% (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16 13.6MB ± 0% 8.8MB ± 0% -34.93% (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16 66.5MB ± 0% 63.9MB ± 0% -3.95% (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16 28.8MB ± 0% 25.8MB ± 0% -10.55% (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16 6.16MB ± 0% 5.56MB ± 0% -9.86% (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16 16.9MB ± 0% 16.0MB ± 0% -5.19% (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16 2.28MB ± 0% 2.29MB ± 0% +0.35% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16 2.07MB ± 0% 2.07MB ± 0% ~ (p=0.114 n=4+4)
SiteNew/Tags_and_categories-16 14.3MB ± 0% 13.2MB ± 0% -7.30% (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16 69.1MB ± 0% 69.0MB ± 0% ~ (p=0.343 n=4+4)
SiteNew/Deep_content_tree-16 31.3MB ± 0% 31.8MB ± 0% +1.49% (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16 10.8MB ± 0% 10.9MB ± 0% +1.11% (p=0.029 n=4+4)
SiteNew/Page_collections-16 21.4MB ± 0% 21.6MB ± 0% +1.15% (p=0.029 n=4+4)
name old allocs/op new allocs/op delta
SiteNew/Bundle_with_image/Edit-16 4.74k ± 0% 3.86k ± 0% -18.57% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16 4.73k ± 0% 3.85k ± 0% -18.58% (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16 301k ± 0% 198k ± 0% -34.14% (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16 389k ± 0% 373k ± 0% -4.07% (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16 338k ± 0% 262k ± 0% -22.63% (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16 102k ± 0% 88k ± 0% -13.81% (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16 176k ± 0% 152k ± 0% -13.32% (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16 26.8k ± 0% 26.8k ± 0% +0.05% (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16 26.8k ± 0% 26.8k ± 0% +0.05% (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16 273k ± 0% 245k ± 0% -10.36% (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16 396k ± 0% 398k ± 0% +0.39% (p=0.029 n=4+4)
SiteNew/Deep_content_tree-16 317k ± 0% 325k ± 0% +2.53% (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16 146k ± 0% 147k ± 0% +0.98% (p=0.029 n=4+4)
SiteNew/Page_collections-16 210k ± 0% 215k ± 0% +2.44% (p=0.029 n=4+4)
```
Fixes #6312
Fixes #6087
Fixes #6738
Fixes #6412
Fixes #6743
Fixes #6875
Fixes #6034
Fixes #6902
Fixes #6173
Fixes #6590
Diffstat (limited to 'hugolib/content_map_page.go')
-rw-r--r-- | hugolib/content_map_page.go | 998 |
1 files changed, 998 insertions, 0 deletions
diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go new file mode 100644 index 000000000..3269abe12 --- /dev/null +++ b/hugolib/content_map_page.go @@ -0,0 +1,998 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugolib + +import ( + "context" + "fmt" + "path" + "path/filepath" + "strings" + "sync" + + "github.com/gohugoio/hugo/common/maps" + + "github.com/gohugoio/hugo/common/types" + "github.com/gohugoio/hugo/resources" + + "github.com/gohugoio/hugo/common/hugio" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/parser/pageparser" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/resources/resource" + "github.com/spf13/cast" + + "github.com/gohugoio/hugo/common/para" + "github.com/pkg/errors" +) + +func newPageMaps(h *HugoSites) *pageMaps { + mps := make([]*pageMap, len(h.Sites)) + for i, s := range h.Sites { + mps[i] = s.pageMap + } + return &pageMaps{ + workers: para.New(h.numWorkers), + pmaps: mps, + } + +} + +type pageMap struct { + s *Site + *contentMap +} + +func (m *pageMap) Len() int { + l := 0 + for _, t := range m.contentMap.pageTrees { + l += t.Len() + } + return l +} + +func (m *pageMap) createMissingTaxonomyNodes() error { + if m.cfg.taxonomyDisabled { + return nil + } + m.taxonomyEntries.Walk(func(s string, v interface{}) bool { + n := v.(*contentNode) + vi := n.viewInfo + k := 
cleanTreeKey(vi.name.plural + "/" + vi.termKey) + + if _, found := m.taxonomies.Get(k); !found { + vic := &contentBundleViewInfo{ + name: vi.name, + termKey: vi.termKey, + termOrigin: vi.termOrigin, + } + m.taxonomies.Insert(k, &contentNode{viewInfo: vic}) + } + return false + }) + + return nil +} + +func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) { + if n.fi == nil { + panic("FileInfo must (currently) be set") + } + + f, err := newFileInfo(m.s.SourceSpec, n.fi) + if err != nil { + return nil, err + } + + meta := n.fi.Meta() + content := func() (hugio.ReadSeekCloser, error) { + return meta.Open() + } + + bundled := owner != nil + s := m.s + + sections := s.sectionsFromFile(f) + + kind := s.kindFromFileInfoOrSections(f, sections) + if kind == page.KindTaxonomy { + s.PathSpec.MakePathsSanitized(sections) + } + + metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f} + + ps, err := newPageBase(metaProvider) + if err != nil { + return nil, err + } + + if n.fi.Meta().GetBool(walkIsRootFileMetaKey) { + // Make sure that the bundle/section we start walking from is always + // rendered. + // This is only relevant in server fast render mode. 
+ ps.forceRender = true + } + + n.p = ps + if ps.IsNode() { + ps.bucket = newPageBucket(ps) + } + + gi, err := s.h.gitInfoForPage(ps) + if err != nil { + return nil, errors.Wrap(err, "failed to load Git data") + } + ps.gitInfo = gi + + r, err := content() + if err != nil { + return nil, err + } + defer r.Close() + + parseResult, err := pageparser.Parse( + r, + pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji}, + ) + if err != nil { + return nil, err + } + + ps.pageContent = pageContent{ + source: rawPageContent{ + parsed: parseResult, + posMainContent: -1, + posSummaryEnd: -1, + posBodyStart: -1, + }, + } + + ps.shortcodeState = newShortcodeHandler(ps, ps.s, nil) + + if err := ps.mapContent(parentBucket, metaProvider); err != nil { + return nil, ps.wrapError(err) + } + + if err := metaProvider.applyDefaultValues(n); err != nil { + return nil, err + } + + ps.init.Add(func() (interface{}, error) { + pp, err := newPagePaths(s, ps, metaProvider) + if err != nil { + return nil, err + } + + outputFormatsForPage := ps.m.outputFormats() + + if !ps.m.noRender() { + // Prepare output formats for all sites. + ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats)) + created := make(map[string]*pageOutput) + + for i, f := range ps.s.h.renderFormats { + if po, found := created[f.Name]; found { + ps.pageOutputs[i] = po + continue + } + + _, render := outputFormatsForPage.GetByName(f.Name) + po := newPageOutput(ps, pp, f, render) + + // Create a content provider for the first, + // we may be able to reuse it. + if i == 0 { + contentProvider, err := newPageContentOutput(ps, po) + if err != nil { + return nil, err + } + po.initContentProvider(contentProvider) + } + + ps.pageOutputs[i] = po + created[f.Name] = po + } + } else if ps.m.buildConfig.PublishResources { + // We need one output format for potential resources to publish. 
+ po := newPageOutput(ps, pp, outputFormatsForPage[0], false) + contentProvider, err := newPageContentOutput(ps, po) + if err != nil { + return nil, err + } + po.initContentProvider(contentProvider) + ps.pageOutputs = []*pageOutput{po} + } + + if err := ps.initCommonProviders(pp); err != nil { + return nil, err + } + + return nil, nil + }) + + ps.parent = owner + + return ps, nil +} + +func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) { + + if owner == nil { + panic("owner is nil") + } + // TODO(bep) consolidate with multihost logic + clean up + outputFormats := owner.m.outputFormats() + seen := make(map[string]bool) + var targetBasePaths []string + // Make sure bundled resources are published to all of the ouptput formats' + // sub paths. + for _, f := range outputFormats { + p := f.Path + if seen[p] { + continue + } + seen[p] = true + targetBasePaths = append(targetBasePaths, p) + + } + + meta := fim.Meta() + r := func() (hugio.ReadSeekCloser, error) { + return meta.Open() + } + + target := strings.TrimPrefix(meta.Path(), owner.File().Dir()) + + return owner.s.ResourceSpec.New( + resources.ResourceSourceDescriptor{ + TargetPaths: owner.getTargetPaths, + OpenReadSeekCloser: r, + FileInfo: fim, + RelTargetFilename: target, + TargetBasePaths: targetBasePaths, + }) +} + +func (m *pageMap) createSiteTaxonomies() error { + m.s.taxonomies = make(TaxonomyList) + m.taxonomies.Walk(func(s string, v interface{}) bool { + n := v.(*contentNode) + t := n.viewInfo + + viewName := t.name + + if t.termKey == "" { + m.s.taxonomies[viewName.plural] = make(Taxonomy) + } else { + taxonomy := m.s.taxonomies[viewName.plural] + m.taxonomyEntries.WalkPrefix(s+"/", func(ss string, v interface{}) bool { + b2 := v.(*contentNode) + info := b2.viewInfo + taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p)) + + return false + }) + } + + return false + }) + + for _, taxonomy := range m.s.taxonomies { + for _, v := range 
taxonomy { + v.Sort() + } + } + + return nil +} + +func (m *pageMap) createListAllPages() page.Pages { + pages := make(page.Pages, 0) + + m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool { + if n.p == nil { + panic(fmt.Sprintf("BUG: page not set for %q", s)) + } + if contentTreeNoListFilter(s, n) { + return false + } + pages = append(pages, n.p) + return false + }) + + page.SortByDefault(pages) + return pages +} + +func (m *pageMap) assemblePages() error { + m.taxonomyEntries.DeletePrefix("/") + + if err := m.assembleSections(); err != nil { + return err + } + + var err error + + if err != nil { + return err + } + + m.pages.Walk(func(s string, v interface{}) bool { + n := v.(*contentNode) + + var shouldBuild bool + + defer func() { + // Make sure we always rebuild the view cache. + if shouldBuild && err == nil && n.p != nil { + m.attachPageToViews(s, n) + } + }() + + if n.p != nil { + // A rebuild + shouldBuild = true + return false + } + + var parent *contentNode + var parentBucket *pagesMapBucket + + _, parent = m.getSection(s) + if parent == nil { + panic(fmt.Sprintf("BUG: parent not set for %q", s)) + } + parentBucket = parent.p.bucket + + n.p, err = m.newPageFromContentNode(n, parentBucket, nil) + if err != nil { + return true + } + + shouldBuild = !(n.p.Kind() == page.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p) + if !shouldBuild { + m.deletePage(s) + return false + } + + n.p.treeRef = &contentTreeRef{ + m: m, + t: m.pages, + n: n, + key: s, + } + + if err = m.assembleResources(s, n.p, parentBucket); err != nil { + return true + } + + return false + }) + + m.deleteOrphanSections() + + return err +} + +func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error { + var err error + + m.resources.WalkPrefix(s, func(s string, v interface{}) bool { + n := v.(*contentNode) + meta := n.fi.Meta() + classifier := meta.Classifier() + var r resource.Resource + switch classifier { + case 
files.ContentClassContent: + var rp *pageState + rp, err = m.newPageFromContentNode(n, parentBucket, p) + if err != nil { + return true + } + rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.Path(), p.File().Dir())) + r = rp + + case files.ContentClassFile: + r, err = m.newResource(n.fi, p) + if err != nil { + return true + } + default: + panic(fmt.Sprintf("invalid classifier: %q", classifier)) + } + + p.resources = append(p.resources, r) + return false + }) + + return err +} + +func (m *pageMap) assembleSections() error { + + var sectionsToDelete []string + var err error + + m.sections.Walk(func(s string, v interface{}) bool { + n := v.(*contentNode) + + var shouldBuild bool + + defer func() { + // Make sure we always rebuild the view cache. + if shouldBuild && err == nil && n.p != nil { + m.attachPageToViews(s, n) + if n.p.IsHome() { + m.s.home = n.p + } + } + }() + + sections := m.splitKey(s) + + if n.p != nil { + if n.p.IsHome() { + m.s.home = n.p + } + shouldBuild = true + return false + } + + var parent *contentNode + var parentBucket *pagesMapBucket + + if s != "/" { + _, parent = m.getSection(s) + if parent == nil || parent.p == nil { + panic(fmt.Sprintf("BUG: parent not set for %q", s)) + } + } + + if parent != nil { + parentBucket = parent.p.bucket + } + + kind := page.KindSection + if s == "/" { + kind = page.KindHome + } + + if n.fi != nil { + n.p, err = m.newPageFromContentNode(n, parentBucket, nil) + if err != nil { + return true + } + } else { + n.p = m.s.newPage(n, parentBucket, kind, "", sections...) 
+ } + + shouldBuild = m.s.shouldBuild(n.p) + if !shouldBuild { + sectionsToDelete = append(sectionsToDelete, s) + return false + } + + n.p.treeRef = &contentTreeRef{ + m: m, + t: m.sections, + n: n, + key: s, + } + + if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil { + return true + } + + return false + }) + + for _, s := range sectionsToDelete { + m.deleteSectionByPath(s) + } + + return err +} + +func (m *pageMap) assembleTaxonomies() error { + + var taxonomiesToDelete []string + var err error + + m.taxonomies.Walk(func(s string, v interface{}) bool { + n := v.(*contentNode) + + if n.p != nil { + return false + } + + kind := n.viewInfo.kind() + sections := n.viewInfo.sections() + + _, parent := m.getTaxonomyParent(s) + if parent == nil || parent.p == nil { + panic(fmt.Sprintf("BUG: parent not set for %q", s)) + } + parentBucket := parent.p.bucket + + if n.fi != nil { + n.p, err = m.newPageFromContentNode(n, parent.p.bucket, nil) + if err != nil { + return true + } + } else { + title := "" + if kind == page.KindTaxonomy { + title = n.viewInfo.term() + } + n.p = m.s.newPage(n, parent.p.bucket, kind, title, sections...) 
+ } + + if !m.s.shouldBuild(n.p) { + taxonomiesToDelete = append(taxonomiesToDelete, s) + return false + } + + n.p.treeRef = &contentTreeRef{ + m: m, + t: m.taxonomies, + n: n, + key: s, + } + + if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil { + return true + } + + return false + }) + + for _, s := range taxonomiesToDelete { + m.deleteTaxonomy(s) + } + + return err + +} + +func (m *pageMap) attachPageToViews(s string, b *contentNode) { + if m.cfg.taxonomyDisabled { + return + } + + for _, viewName := range m.cfg.taxonomyConfig { + vals := types.ToStringSlicePreserveString(getParam(b.p, viewName.plural, false)) + if vals == nil { + continue + } + + w := getParamToLower(b.p, viewName.plural+"_weight") + weight, err := cast.ToIntE(w) + if err != nil { + m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, b.p.Path()) + // weight will equal zero, so let the flow continue + } + + for _, v := range vals { + termKey := m.s.getTaxonomyKey(v) + + bv := &contentNode{ + viewInfo: &contentBundleViewInfo{ + name: viewName, + termKey: termKey, + termOrigin: v, + weight: weight, + ref: b, + }, + } + + if s == "/" { + // To avoid getting an empty key. 
+ s = "home" + } + key := cleanTreeKey(path.Join(viewName.plural, termKey, s)) + m.taxonomyEntries.Insert(key, bv) + } + } +} + +func (m *pageMap) collectPages(prefix string, fn func(c *contentNode)) error { + m.pages.WalkPrefixListable(prefix, func(s string, n *contentNode) bool { + fn(n) + return false + }) + return nil +} + +func (m *pageMap) collectPagesAndSections(prefix string, fn func(c *contentNode)) error { + if err := m.collectSections(prefix, fn); err != nil { + return err + } + + if err := m.collectPages(prefix+cmBranchSeparator, fn); err != nil { + return err + } + + return nil +} + +func (m *pageMap) collectSections(prefix string, fn func(c *contentNode)) error { + var level int + isHome := prefix == "/" + + if !isHome { + level = strings.Count(prefix, "/") + } + + return m.collectSectionsFn(prefix, func(s string, c *contentNode) bool { + if s == prefix { + return false + } + + if (strings.Count(s, "/") - level) != 1 { + return false + } + + fn(c) + + return false + }) +} + +func (m *pageMap) collectSectionsFn(prefix string, fn func(s string, c *contentNode) bool) error { + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + + m.sections.WalkPrefixListable(prefix, func(s string, n *contentNode) bool { + return fn(s, n) + }) + + return nil +} + +func (m *pageMap) collectSectionsRecursiveIncludingSelf(prefix string, fn func(c *contentNode)) error { + return m.collectSectionsFn(prefix, func(s string, c *contentNode) bool { + fn(c) + return false + }) +} + +func (m *pageMap) collectTaxonomies(prefix string, fn func(c *contentNode)) error { + m.taxonomies.WalkPrefixListable(prefix, func(s string, n *contentNode) bool { + fn(n) + return false + }) + return nil +} + +// withEveryBundlePage applies fn to every Page, including those bundled inside +// leaf bundles. 
+func (m *pageMap) withEveryBundlePage(fn func(p *pageState) bool) { + m.bundleTrees.Walk(func(s string, n *contentNode) bool { + if n.p != nil { + return fn(n.p) + } + return false + }) +} + +type pageMaps struct { + workers *para.Workers + pmaps []*pageMap +} + +// deleteSection deletes the entire section from s. +func (m *pageMaps) deleteSection(s string) { + m.withMaps(func(pm *pageMap) error { + pm.deleteSectionByPath(s) + return nil + }) +} + +func (m *pageMaps) AssemblePages() error { + return m.withMaps(func(pm *pageMap) error { + if err := pm.CreateMissingNodes(); err != nil { + return err + } + + if err := pm.assemblePages(); err != nil { + return err + } + + if err := pm.createMissingTaxonomyNodes(); err != nil { + return err + } + + // Handle any new sections created in the step above. + if err := pm.assembleSections(); err != nil { + return err + } + + if err := pm.assembleTaxonomies(); err != nil { + return err + } + + if err := pm.createSiteTaxonomies(); err != nil { + return err + } + + a := (§ionWalker{m: pm.contentMap}).applyAggregates() + _, mainSectionsSet := pm.s.s.Info.Params()["mainsections"] + if !mainSectionsSet && a.mainSection != "" { + mainSections := []string{a.mainSection} + pm.s.s.Info.Params()["mainSections"] = mainSections + pm.s.s.Info.Params()["mainsections"] = mainSections + } + + pm.s.lastmod = a.datesAll.Lastmod() + if resource.IsZeroDates(pm.s.home) { + pm.s.home.m.Dates = a.datesAll + } + + return nil + }) +} + +func (m *pageMaps) walkBundles(fn func(n *contentNode) bool) { + _ = m.withMaps(func(pm *pageMap) error { + pm.bundleTrees.Walk(func(s string, n *contentNode) bool { + return fn(n) + }) + return nil + }) +} + +func (m *pageMaps) walkBranchesPrefix(prefix string, fn func(s string, n *contentNode) bool) { + _ = m.withMaps(func(pm *pageMap) error { + pm.branchTrees.WalkPrefix(prefix, func(s string, n *contentNode) bool { + return fn(s, n) + }) + return nil + }) +} + +func (m *pageMaps) withMaps(fn func(pm *pageMap) 
error) error { + g, _ := m.workers.Start(context.Background()) + for _, pm := range m.pmaps { + pm := pm + g.Run(func() error { + return fn(pm) + }) + } + return g.Wait() +} + +type pagesMapBucket struct { + // Cascading front matter. + cascade maps.Params + + owner *pageState // The branch node + + pagesInit sync.Once + pages page.Pages + + pagesAndSectionsInit sync.Once + pagesAndSections page.Pages + + sectionsInit sync.Once + sections page.Pages +} + +func (b *pagesMapBucket) getPages() page.Pages { + b.pagesInit.Do(func() { + b.pages = b.owner.treeRef.collectPages() + page.SortByDefault(b.pages) + }) + return b.pages +} + +func (b *pagesMapBucket) getPagesAndSections() page.Pages { + b.pagesAndSectionsInit.Do(func() { + b.pagesAndSections = b.owner.treeRef.collectPagesAndSections() + }) + return b.pagesAndSections +} + +func (b *pagesMapBucket) getSections() page.Pages { + b.sectionsInit.Do(func() { + b.sections = b.owner.treeRef.collectSections() + }) + + return b.sections +} + +func (b *pagesMapBucket) getTaxonomies() page.Pages { + b.sectionsInit.Do(func() { + var pas page.Pages + ref := b.owner.treeRef + ref.m.collectTaxonomies(ref.key+"/", func(c *contentNode) { + pas = append(pas, c.p) + }) + page.SortByDefault(pas) + b.sections = pas + }) + + return b.sections +} + +type sectionAggregate struct { + datesAll resource.Dates + datesSection resource.Dates + pageCount int + mainSection string + mainSectionPageCount int +} + +type sectionAggregateHandler struct { + sectionAggregate + sectionPageCount int + + // Section + b *contentNode + s string +} + +func (h *sectionAggregateHandler) isRootSection() bool { + return h.s != "/" && strings.Count(h.s, "/") == 1 +} + +func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error { + nested := v.(*sectionAggregateHandler) + h.sectionPageCount += nested.pageCount + h.pageCount += h.sectionPageCount + h.datesAll.UpdateDateAndLastmodIfAfter(nested.datesAll) + 
h.datesSection.UpdateDateAndLastmodIfAfter(nested.datesAll) + return nil +} + +func (h *sectionAggregateHandler) handlePage(s string, n *contentNode) error { + h.sectionPageCount++ + + var d resource.Dated + if n.p != nil { + d = n.p + } else if n.viewInfo != nil && n.viewInfo.ref != nil { + d = n.viewInfo.ref.p + } else { + return nil + } + + h.datesAll.UpdateDateAndLastmodIfAfter(d) + h.datesSection.UpdateDateAndLastmodIfAfter(d) + return nil +} + +func (h *sectionAggregateHandler) handleSectionPost() error { + if h.sectionPageCount > h.mainSectionPageCount && h.isRootSection() { + h.mainSectionPageCount = h.sectionPageCount + h.mainSection = strings.TrimPrefix(h.s, "/") + } + + if resource.IsZeroDates(h.b.p) { + h.b.p.m.Dates = h.datesSection + } + + h.datesSection = resource.Dates{} + + return nil +} + +func (h *sectionAggregateHandler) handleSectionPre(s string, b *contentNode) error { + h.s = s + h.b = b + h.sectionPageCount = 0 + h.datesAll.UpdateDateAndLastmodIfAfter(b.p) + return nil +} + +type sectionWalkHandler interface { + handleNested(v sectionWalkHandler) error + handlePage(s string, b *contentNode) error + handleSectionPost() error + handleSectionPre(s string, b *contentNode) error +} + +type sectionWalker struct { + err error + m *contentMap +} + +func (w *sectionWalker) applyAggregates() *sectionAggregateHandler { + return w.walkLevel("/", func() sectionWalkHandler { + return §ionAggregateHandler{} + }).(*sectionAggregateHandler) + +} + +func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler { + + level := strings.Count(prefix, "/") + visitor := createVisitor() + + w.m.taxonomies.WalkPrefix(prefix, func(s string, v interface{}) bool { + currentLevel := strings.Count(s, "/") + if currentLevel > level { + return false + } + + n := v.(*contentNode) + + if w.err = visitor.handleSectionPre(s, n); w.err != nil { + return true + } + + if currentLevel == 1 { + nested := w.walkLevel(s+"/", 
createVisitor) + if w.err = visitor.handleNested(nested); w.err != nil { + return true + } + } else { + w.m.taxonomyEntries.WalkPrefix(s, func(ss string, v interface{}) bool { + n := v.(*contentNode) + w.err = visitor.handlePage(ss, n) + return w.err != nil + }) + } + + w.err = visitor.handleSectionPost() + + return w.err != nil + }) + + w.m.sections.WalkPrefix(prefix, func(s string, v interface{}) bool { + currentLevel := strings.Count(s, "/") + if currentLevel > level { + return false + } + + n := v.(*contentNode) + + if w.err = visitor.handleSectionPre(s, n); w.err != nil { + return true + } + + w.m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v interface{}) bool { + w.err = visitor.handlePage(s, v.(*contentNode)) + return w.err != nil + }) + + if w.err != nil { + return true + } + + if s != "/" { + nested := w.walkLevel(s+"/", createVisitor) + if w.err = visitor.handleNested(nested); w.err != nil { + return true + } + } + + w.err = visitor.handleSectionPost() + + return w.err != nil + }) + + return visitor + |