author     Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2019-09-10 11:26:34 +0200
committer  Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2020-02-18 09:49:42 +0100
commit     eada236f87d9669885da1ff647672bb3dc6b4954 (patch)
tree       a0303f067b2cbe06c55637013dbd7702a551c64f /hugolib/content_map.go
parent     e5329f13c02b87f0c30f8837759c810cd90ff8da (diff)
Introduce a tree map for all content
This commit introduces a new data structure to store pages and their resources. The data structure is backed by radix trees. This simplifies tree operations, makes all pages a bundle, and paves the way for #6310. It also solves a set of annoying issues (see list below). Not a motivation behind this change, but it also makes Hugo in general a little bit faster and more memory efficient (see benchmarks), especially for partial rebuilds on content edits, but also when taxonomies are in use.

```
name                                    old time/op    new time/op    delta
SiteNew/Bundle_with_image/Edit-16       1.32ms ± 8%    1.00ms ± 9%   -24.42%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16   1.28ms ± 0%    0.94ms ± 0%   -26.26%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16     33.9ms ± 2%    21.8ms ± 1%   -35.67%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16           40.6ms ± 1%    37.7ms ± 3%    -7.20%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16       56.7ms ± 0%    51.7ms ± 1%    -8.82%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16     19.9ms ± 2%    18.3ms ± 3%    -7.64%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16        37.9ms ± 4%    34.0ms ± 2%   -10.28%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16            10.7ms ± 0%    10.6ms ± 0%    -1.15%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16        10.8ms ± 0%    10.7ms ± 0%    -1.05%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16          43.2ms ± 1%    39.6ms ± 1%    -8.35%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                47.6ms ± 1%    47.3ms ± 0%      ~     (p=0.057 n=4+4)
SiteNew/Deep_content_tree-16            73.0ms ± 1%    74.2ms ± 1%      ~     (p=0.114 n=4+4)
SiteNew/Many_HTML_templates-16          37.9ms ± 0%    38.1ms ± 1%      ~     (p=0.114 n=4+4)
SiteNew/Page_collections-16             53.6ms ± 1%    54.7ms ± 1%    +2.09%  (p=0.029 n=4+4)

name                                    old alloc/op   new alloc/op   delta
SiteNew/Bundle_with_image/Edit-16        486kB ± 0%     430kB ± 0%   -11.47%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16    265kB ± 0%     209kB ± 0%   -21.06%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16     13.6MB ± 0%     8.8MB ± 0%   -34.93%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16           66.5MB ± 0%    63.9MB ± 0%    -3.95%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16       28.8MB ± 0%    25.8MB ± 0%   -10.55%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16     6.16MB ± 0%    5.56MB ± 0%    -9.86%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16        16.9MB ± 0%    16.0MB ± 0%    -5.19%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16            2.28MB ± 0%    2.29MB ± 0%    +0.35%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16        2.07MB ± 0%    2.07MB ± 0%      ~     (p=0.114 n=4+4)
SiteNew/Tags_and_categories-16          14.3MB ± 0%    13.2MB ± 0%    -7.30%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                69.1MB ± 0%    69.0MB ± 0%      ~     (p=0.343 n=4+4)
SiteNew/Deep_content_tree-16            31.3MB ± 0%    31.8MB ± 0%    +1.49%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16          10.8MB ± 0%    10.9MB ± 0%    +1.11%  (p=0.029 n=4+4)
SiteNew/Page_collections-16             21.4MB ± 0%    21.6MB ± 0%    +1.15%  (p=0.029 n=4+4)

name                                    old allocs/op  new allocs/op  delta
SiteNew/Bundle_with_image/Edit-16        4.74k ± 0%     3.86k ± 0%   -18.57%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16    4.73k ± 0%     3.85k ± 0%   -18.58%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16       301k ± 0%      198k ± 0%   -34.14%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16             389k ± 0%      373k ± 0%    -4.07%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16         338k ± 0%      262k ± 0%   -22.63%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16       102k ± 0%       88k ± 0%   -13.81%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16          176k ± 0%      152k ± 0%   -13.32%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16             26.8k ± 0%     26.8k ± 0%    +0.05%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16         26.8k ± 0%     26.8k ± 0%    +0.05%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16            273k ± 0%      245k ± 0%   -10.36%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                  396k ± 0%      398k ± 0%    +0.39%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree-16              317k ± 0%      325k ± 0%    +2.53%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16            146k ± 0%      147k ± 0%    +0.98%  (p=0.029 n=4+4)
SiteNew/Page_collections-16               210k ± 0%      215k ± 0%    +2.44%  (p=0.029 n=4+4)
```

Fixes #6312
Fixes #6087
Fixes #6738
Fixes #6412
Fixes #6743
Fixes #6875
Fixes #6034
Fixes #6902
Fixes #6173
Fixes #6590
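For illustration, a minimal sketch of the radix-tree idea behind the new content map, using the same github.com/armon/go-radix package the new file imports. The keys and values below are hypothetical, loosely modelled on the `__hb_`/`__hl_` key scheme documented in content_map.go, not Hugo's actual data:

```
package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	// Two trees, loosely mirroring the pages and resources trees in contentMap.
	pages := radix.New()
	resources := radix.New()

	// Hypothetical keys following the "<section>__hb_/<page>__hl_" scheme.
	pages.Insert("/blog__hb_/post1__hl_", "post1")
	pages.Insert("/blog__hb_/post2__hl_", "post2")
	resources.Insert("/blog__hb_/post2__hl_data.json", "data.json")

	// All pages below /blog in a single prefix walk.
	pages.WalkPrefix("/blog__hb_", func(key string, v interface{}) bool {
		fmt.Println("page:", key, v)
		return false // false => keep walking
	})

	// The page owning a resource is the longest page key that prefixes the resource key.
	if k, v, ok := pages.LongestPrefix("/blog__hb_/post2__hl_data.json"); ok {
		fmt.Println("owner:", k, v)
	}

	// Deleting a bundle removes its resources with one prefix delete.
	resources.DeletePrefix("/blog__hb_/post2__hl_")
}
```

The contentMap introduced below keeps five such trees (pages, sections, taxonomies, taxonomyEntries, resources), all storing *contentNode values.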
Diffstat (limited to 'hugolib/content_map.go')
-rw-r--r--  hugolib/content_map.go | 971
1 file changed, 971 insertions, 0 deletions
diff --git a/hugolib/content_map.go b/hugolib/content_map.go
new file mode 100644
index 000000000..f0b66d859
--- /dev/null
+++ b/hugolib/content_map.go
@@ -0,0 +1,971 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/pkg/errors"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ radix "github.com/armon/go-radix"
+)
+
+// We store the branch nodes in either the `sections` or `taxonomies` tree
+// with their path as a key; Unix style slashes, a leading slash but no
+// trailing slash.
+//
+// E.g. "/blog" or "/categories/funny"
+//
+// Pages that belong to a section are stored in the `pages` tree below
+// the section name and a branch separator, e.g. "/blog__hb_". A page is
+// given a key using the path below the section and the base filename without
+// extension, with a leaf separator appended.
+//
+// For bundled pages (/mybundle/index.md), we use the folder name.
+//
+// An example of a full page key would be "/blog__hb_/page1__hl_".
+//
+// Bundled resources are stored in the `resources` tree, with their path
+// prefixed with the bundle they belong to, e.g.
+// "/blog__hb_/bundle__hl_data.json".
+//
+// The weighted taxonomy entries extracted from page front matter are stored in
+// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
+// "/categories/funny/blog__hb_/bundle__hl_".
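+//
+// For illustration only (a hypothetical content layout, following the scheme
+// above): a leaf bundle at content/blog/mybundle/index.md with a data.json
+// resource ends up as:
+//
+//   sections:  "/blog"
+//   pages:     "/blog__hb_/mybundle__hl_"
+//   resources: "/blog__hb_/mybundle__hl_data.json"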
+const (
+ cmBranchSeparator = "__hb_"
+ cmLeafSeparator = "__hl_"
+)
+
+// Used to mark ambiguous keys in reverse index lookups.
+var ambigousContentNode = &contentNode{}
+
+func newContentMap(cfg contentMapConfig) *contentMap {
+ m := &contentMap{
+ cfg: &cfg,
+ pages: &contentTree{Name: "pages", Tree: radix.New()},
+ sections: &contentTree{Name: "sections", Tree: radix.New()},
+ taxonomies: &contentTree{Name: "taxonomies", Tree: radix.New()},
+ taxonomyEntries: &contentTree{Name: "taxonomyEntries", Tree: radix.New()},
+ resources: &contentTree{Name: "resources", Tree: radix.New()},
+ }
+
+ m.pageTrees = []*contentTree{
+ m.pages, m.sections, m.taxonomies,
+ }
+
+ m.bundleTrees = []*contentTree{
+ m.pages, m.sections, m.taxonomies, m.resources,
+ }
+
+ m.branchTrees = []*contentTree{
+ m.sections, m.taxonomies,
+ }
+
+ addToReverseMap := func(k string, n *contentNode, m map[interface{}]*contentNode) {
+ k = strings.ToLower(k)
+ existing, found := m[k]
+ if found && existing != ambigousContentNode {
+ m[k] = ambigousContentNode
+ } else if !found {
+ m[k] = n
+ }
+ }
+
+ m.pageReverseIndex = &contentTreeReverseIndex{
+ t: []*contentTree{m.pages, m.sections, m.taxonomies},
+ initFn: func(t *contentTree, m map[interface{}]*contentNode) {
+ t.Walk(func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+ if n.p != nil && !n.p.File().IsZero() {
+ meta := n.p.File().FileInfo().Meta()
+ if meta.Path() != meta.PathFile() {
+ // Keep track of the original mount source.
+ mountKey := filepath.ToSlash(filepath.Join(meta.Module(), meta.PathFile()))
+ addToReverseMap(mountKey, n, m)
+ }
+ }
+ k := strings.TrimSuffix(path.Base(s), cmLeafSeparator)
+ addToReverseMap(k, n, m)
+ return false
+ })
+ },
+ }
+
+ return m
+}
+
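+// cmInsertKeyBuilder is a small helper for building tree keys and inserting
+// content nodes into the right tree.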
+type cmInsertKeyBuilder struct {
+ m *contentMap
+
+ err error
+
+ // Builder state
+ tree *contentTree
+ baseKey string // Section or page key
+ key string
+}
+
+func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
+ // TODO2 fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
+ baseKey := b.baseKey
+ b.baseKey = s
+
+ if !strings.HasPrefix(s, "/") {
+ s = "/" + s
+ }
+
+ if baseKey != "/" {
+ // Don't repeat the section path in the key.
+ s = strings.TrimPrefix(s, baseKey)
+ }
+
+ switch b.tree {
+ case b.m.sections:
+ b.tree = b.m.pages
+ b.key = baseKey + cmBranchSeparator + s + cmLeafSeparator
+ case b.m.taxonomies:
+ b.key = path.Join(baseKey, s)
+ default:
+ panic("invalid state")
+ }
+
+ return &b
+}
+
+func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
+ // TODO2 fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
+
+ s = strings.TrimPrefix(s, "/")
+ s = strings.TrimPrefix(s, strings.TrimPrefix(b.baseKey, "/")+"/")
+
+ switch b.tree {
+ case b.m.pages:
+ b.key = b.key + s
+ case b.m.sections, b.m.taxonomies:
+ b.key = b.key + cmLeafSeparator + s
+ default:
+ panic(fmt.Sprintf("invalid state: %#v", b.tree))
+ }
+ b.tree = b.m.resources
+ return &b
+}
+
+func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
+ if b.err == nil {
+ b.tree.Insert(cleanTreeKey(b.key), n)
+ }
+ return b
+}
+
+func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
+ if b.err == nil {
+ b.tree.DeletePrefix(cleanTreeKey(b.key))
+ }
+ return b
+}
+
+func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilder {
+ b.newTopLevel()
+ m := b.m
+ meta := fi.Meta()
+ p := cleanTreeKey(meta.Path())
+ bundlePath := m.getBundleDir(meta)
+ isBundle := meta.Classifier().IsBundle()
+ if isBundle {
+ panic("not implemented")
+ }
+
+ p, k := b.getBundle(p)
+ if k == "" {
+ b.err = errors.Errorf("no bundle header found for %q", bundlePath)
+ return b
+ }
+
+ id := k + m.reduceKeyPart(p, fi.Meta().Path())
+ b.tree = b.m.resources
+ b.key = id
+ b.baseKey = p
+
+ return b
+}
+
+func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
+ b.newTopLevel()
+ b.tree = b.m.sections
+ b.baseKey = s
+ b.key = s
+ // TODO2 fmt.Println("WithSection:", s, "baseKey:", b.baseKey, "key:", b.key)
+ return b
+}
+
+func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
+ b.newTopLevel()
+ b.tree = b.m.taxonomies
+ b.baseKey = s
+ b.key = s
+ return b
+}
+
+// getBundle returns the bundle path for the given key and the tree-key prefix
+// under which that bundle and its resources are stored.
+func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
+ m := b.m
+ section, _ := m.getSection(s)
+
+ p := s
+ if section != "/" {
+ p = strings.TrimPrefix(s, section)
+ }
+
+ bundlePathParts := strings.Split(p, "/")[1:]
+ basePath := section + cmBranchSeparator
+
+ // Put it into an existing bundle if found.
+ for i := len(bundlePathParts) - 2; i >= 0; i-- {
+ bundlePath := path.Join(bundlePathParts[:i]...)
+ searchKey := basePath + "/" + bundlePath + cmLeafSeparator
+ if _, found := m.pages.Get(searchKey); found {
+ return section + "/" + bundlePath, searchKey
+ }
+ }
+
+ // Put it into the section bundle.
+ return section, section + cmLeafSeparator
+}
+
+func (b *cmInsertKeyBuilder) newTopLevel() {
+ b.key = ""
+}
+
+type contentBundleViewInfo struct {
+ name viewName
+ termKey string
+ termOrigin string
+ weight int
+ ref *contentNode
+}
+
+func (c *contentBundleViewInfo) kind() string {
+ if c.termKey != "" {
+ return page.KindTaxonomy
+ }
+ return page.KindTaxonomyTerm
+}
+
+func (c *contentBundleViewInfo) sections() []string {
+ if c.kind() == page.KindTaxonomyTerm {
+ return []string{c.name.plural}
+ }
+
+ return []string{c.name.plural, c.termKey}
+
+}
+
+func (c *contentBundleViewInfo) term() string {
+ if c.termOrigin != "" {
+ return c.termOrigin
+ }
+
+ return c.termKey
+}
+
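+// contentMap stores all content nodes (pages, sections, taxonomies, taxonomy
+// entries and resources) in a set of radix trees.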
+type contentMap struct {
+ cfg *contentMapConfig
+
+ // View of regular pages, sections, and taxonomies.
+ pageTrees contentTrees
+
+ // View of pages, sections, taxonomies, and resources.
+ bundleTrees contentTrees
+
+ // View of sections and taxonomies.
+ branchTrees contentTrees
+
+ // Stores page bundles keyed by their path's directory or the base filename,
+ // e.g. "blog/post.md" => "/blog/post", "blog/post/index.md" => "/blog/post".
+ // These are the "regular pages" and all of them are bundles.
+ pages *contentTree
+
+ // A reverse index used as a fallback in GetPage.
+ // There are currently two cases where this is used:
+ // 1. Short name lookups in ref/relRef, e.g. using only "mypage.md" without a path.
+ // 2. Links resolved from a remounted content directory. These are restricted to the same module.
+ // Both of the above cases can result in ambiguous lookup errors.
+ pageReverseIndex *contentTreeReverseIndex
+
+ // Section nodes.
+ sections *contentTree
+
+ // Taxonomy nodes.
+ taxonomies *contentTree
+
+ // Pages in a taxonomy.
+ taxonomyEntries *contentTree
+
+ // Resources stored per bundle below a common prefix, e.g. "/blog__hb_/post__hl_".
+ resources *contentTree
+}
+
+func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error {
+ for _, fi := range fis {
+ if err := m.addFile(fi); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error {
+ var (
+ meta = header.Meta()
+ classifier = meta.Classifier()
+ isBranch = classifier == files.ContentClassBranch
+ bundlePath = m.getBundleDir(meta)
+
+ n = m.newContentNodeFromFi(header)
+ b = m.newKeyBuilder()
+
+ section string
+ )
+
+ if isBranch {
+ // Either a section or a taxonomy node.
+ section = bundlePath
+ if tc := m.cfg.getTaxonomyConfig(section); !tc.IsZero() {
+ term := strings.TrimPrefix(strings.TrimPrefix(section, "/"+tc.plural), "/")
+
+ n.viewInfo = &contentBundleViewInfo{
+ name: tc,
+ termKey: term,
+ termOrigin: term,
+ }
+
+ n.viewInfo.ref = n
+ b.WithTaxonomy(section).Insert(n)
+ } else {
+ b.WithSection(section).Insert(n)
+ }
+ } else {
+ // A regular page. Attach it to its section.
+ section, _ = m.getOrCreateSection(n, bundlePath)
+ b = b.WithSection(section).ForPage(bundlePath).Insert(n)
+ }
+
+ if m.cfg.isRebuild {
+ // The resource owner will be either deleted or overwritten on rebuilds,
+ // but make sure we handle deletion of resources (images etc.) as well.
+ b.ForResource("").DeleteAll()
+ }
+
+ for _, r := range resources {
+ rb := b.ForResource(cleanTreeKey(r.Meta().Path()))
+ rb.Insert(&contentNode{fi: r})
+ }
+
+ return nil
+
+}
+
+func (m *contentMap) CreateMissingNodes() error {
+ // Create missing home and root sections
+ rootSections := make(map[string]interface{})
+ trackRootSection := func(s string, b *contentNode) {
+ parts := strings.Split(s, "/")
+ if len(parts) > 2 {
+ root := strings.TrimSuffix(parts[1], cmBranchSeparator)
+ if root != "" {
+ if _, found := rootSections[root]; !found {
+ rootSections[root] = b
+ }
+ }
+ }
+ }
+
+ m.sections.Walk(func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+
+ if s == "/" {
+ return false
+ }
+
+ trackRootSection(s, n)
+ return false
+ })
+
+ m.pages.Walk(func(s string, v interface{}) bool {
+ trackRootSection(s, v.(*contentNode))
+ return false
+ })
+
+ if _, found := rootSections["/"]; !found {
+ rootSections["/"] = true
+ }
+
+ for sect, v := range rootSections {
+ var sectionPath string
+ if n, ok := v.(*contentNode); ok && n.path != "" {
+ sectionPath = n.path
+ firstSlash := strings.Index(sectionPath, "/")
+ if firstSlash != -1 {
+ sectionPath = sectionPath[:firstSlash]
+ }
+ }
+ sect = cleanTreeKey(sect)
+ _, found := m.sections.Get(sect)
+ if !found {
+ m.sections.Insert(sect, &contentNode{path: sectionPath})
+ }
+ }
+
+ for _, view := range m.cfg.taxonomyConfig {
+ s := cleanTreeKey(view.plural)
+ _, found := m.taxonomies.Get(s)
+ if !found {
+ b := &contentNode{
+ viewInfo: &contentBundleViewInfo{
+ name: view,
+ },
+ }
+ b.viewInfo.ref = b
+ m.taxonomies.Insert(s, b)
+ }
+ }
+
+ return nil
+
+}
+
+func (m *contentMap) getBundleDir(meta hugofs.FileMeta) string {
+ dir := cleanTreeKey(filepath.Dir(meta.Path()))
+
+ switch meta.Classifier() {
+ case files.ContentClassContent:
+ return path.Join(dir, meta.TranslationBaseName())
+ default:
+ return dir
+ }
+}
+
+func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
+ return &contentNode{
+ fi: fi,
+ path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path()), "/"),
+ }
+}
+
+func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
+ for {
+ k, v, found := m.sections.LongestPrefix(s)
+ if !found {
+ return "", nil
+ }
+ if strings.Count(k, "/") == 1 {
+ return k, v.(*contentNode)
+ }
+ s = path.Dir(s)
+ }
+}
+
+func (m *contentMap) newKeyBuilder() *cmInsertKeyBuilder {
+ return &cmInsertKeyBuilder{m: m}
+}
+
+func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *contentNode) {
+ level := strings.Count(s, "/")
+ k, b := m.getSection(s)
+
+ mustCreate := false
+
+ if k == "" {
+ mustCreate = true
+ } else if level > 1 && k == "/" {
+ // We found the home section, but this page needs to be placed in
+ // a root section, e.g. "/blog".
+ mustCreate = true
+ }
+
+ if mustCreate {
+ k = s[:strings.Index(s[1:], "/")+1]
+ if k == "" {
+ k = "/"
+ }
+
+ b = &contentNode{
+ path: n.rootSection(),
+ }
+
+ m.sections.Insert(k, b)
+ }
+
+ return k, b
+}
+
+func (m *contentMap) getPage(section, name string) *contentNode {
+ key := section + cmBranchSeparator + "/" + name + cmLeafSeparator
+ v, found := m.pages.Get(key)
+ if found {
+ return v.(*contentNode)
+ }
+ return nil
+}
+
+func (m *contentMap) getSection(s string) (string, *contentNode) {
+ k, v, found := m.sections.LongestPrefix(path.Dir(s))
+ if found {
+ return k, v.(*contentNode)
+ }
+ return "", nil
+}
+
+func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
+ s = path.Dir(s)
+ if s == "/" {
+ v, found := m.sections.Get(s)
+ if found {
+ return s, v.(*contentNode)
+ }
+ return "", nil
+ }
+
+ for _, tree := range []*contentTree{m.taxonomies, m.sections} {
+ k, v, found := tree.LongestPrefix(s)
+ if found {
+ return k, v.(*contentNode)
+ }
+ }
+ return "", nil
+}
+
+func (m *contentMap) addFile(fi hugofs.FileMetaInfo) error {
+ b := m.newKeyBuilder()
+ return b.WithFile(fi).Insert(m.newContentNodeFromFi(fi)).err
+}
+
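+// cleanTreeKey normalizes k into a tree key: forward slashes, lower case,
+// a leading slash and no trailing slash.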
+func cleanTreeKey(k string) string {
+ k = "/" + strings.ToLower(strings.Trim(path.Clean(filepath.ToSlash(k)), "./"))
+ return k
+}
+
+func (m *contentMap) onSameLevel(s1, s2 string) bool {
+ return strings.Count(s1, "/") == strings.Count(s2, "/")
+}
+
+func (m *contentMap) deleteBundleMatching(matches func(b *contentNode) bool) {
+ // Check sections first
+ s := m.sections.getMatch(matches)
+ if s != "" {
+ m.deleteSectionByPath(s)
+ return
+ }
+
+ s = m.pages.getMatch(matches)
+ if s != "" {
+ m.deletePage(s)
+ return
+ }
+
+ s = m.resources.getMatch(matches)
+ if s != "" {
+ m.resources.Delete(s)
+ }
+
+}
+
+// Deletes any empty root section that's not backed by a content file.
+func (m *contentMap) deleteOrphanSections() {
+
+ m.sections.Walk(func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+
+ if n.fi != nil {
+ // Section may be empty, but is backed by a content file.
+ return false
+ }
+
+ if s == "/" || strings.Count(s, "/") > 1 {
+ return false
+ }
+
+ prefixBundle := s + cmBranchSeparator
+
+ if !(m.sections.hasPrefix(s+"/") || m.pages.hasPrefix(prefixBundle) || m.resources.hasPrefix(prefixBundle)) {
+ m.sections.Delete(s)
+ }
+
+ return false
+ })
+}
+
+func (m *contentMap) deletePage(s string) {
+ m.pages.DeletePrefix(s)
+ m.resources.DeletePrefix(s)
+}
+
+func (m *contentMap) deleteSectionByPath(s string) {
+ m.sections.Delete(s)
+ m.sections.DeletePrefix(s + "/")
+ m.pages.DeletePrefix(s + cmBranchSeparator)
+ m.pages.DeletePrefix(s + "/")
+ m.resources.DeletePrefix(s + cmBranchSeparator)
+ m.resources.DeletePrefix(s + cmLeafSeparator)
+ m.resources.DeletePrefix(s + "/")
+}
+
+func (m *contentMap) deletePageByPath(s string) {
+ m.pages.Walk(func(s string, v interface{}) bool {
+ fmt.Println("S", s)
+
+ return false
+ })
+}
+
+func (m *contentMap) deleteTaxonomy(s string) {
+ m.taxonomies.Delete(s)
+ m.taxonomies.DeletePrefix(s + "/")
+}
+
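+// reduceKeyPart returns the part of filename below dir, slash separated and
+// without a leading slash.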
+func (m *contentMap) reduceKeyPart(dir, filename string) string {
+ dir, filename = filepath.ToSlash(dir), filepath.ToSlash(filename)
+ dir, filename = strings.TrimPrefix(dir, "/"), strings.TrimPrefix(filename, "/")
+
+ return strings.TrimPrefix(strings.TrimPrefix(filename, dir), "/")
+}
+
+func (m *contentMap) splitKey(k string) []string {
+ if k == "" || k == "/" {
+ return nil
+ }
+
+ return strings.Split(k, "/")[1:]
+
+}
+
+func (m *contentMap) testDump() string {
+ var sb strings.Builder
+
+ for i, r := range []*contentTree{m.pages, m.sections, m.resources} {
+ sb.WriteString(fmt.Sprintf("Tree %d:\n", i))
+ r.Walk(func(s string, v interface{}) bool {
+ sb.WriteString("\t" + s + "\n")
+ return false
+ })
+ }
+
+ for i, r := range []*contentTree{m.pages, m.sections} {
+
+ r.Walk(func(s string, v interface{}) bool {
+ c := v.(*contentNode)
+ cpToString := func(c *contentNode) string {
+ var sb strings.Builder
+ if c.p != nil {
+ sb.WriteString("|p:" + c.p.Title())
+ }
+ if c.fi != nil {
+ sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path()))
+ }
+ return sb.String()
+ }
+ sb.WriteString(path.Join(m.cfg.lang, r.Name) + s + cpToString(c) + "\n")
+
+ resourcesPrefix := s
+
+ if i == 1 {
+ resourcesPrefix += cmLeafSeparator
+
+ m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v interface{}) bool {
+ sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
+ return false
+ })
+ }
+
+ m.resources.WalkPrefix(resourcesPrefix, func(s string, v interface{}) bool {
+ sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
+ return false
+
+ })
+
+ return false
+ })
+ }
+
+ return sb.String()
+
+}
+
+type contentMapConfig struct {
+ lang string
+ taxonomyConfig []viewName
+ taxonomyDisabled bool
+ taxonomyTermDisabled bool
+ pageDisabled bool
+ isRebuild bool
+}
+
+func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
+ s = strings.TrimPrefix(s, "/")
+ if s == "" {
+ return
+ }
+ for _, n := range cfg.taxonomyConfig {
+ if strings.HasPrefix(s, n.plural) {
+ return n
+ }
+ }
+
+ return
+}
+
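+// contentNode is a node in one of the content trees. It may be backed by a
+// page, a source file, or both.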
+type contentNode struct {
+ p *pageState
+
+ // Set for taxonomy nodes.
+ viewInfo *contentBundleViewInfo
+
+ // Set if source is a file.
+ // We will soon get other sources.
+ fi hugofs.FileMetaInfo
+
+ // The source path. Unix slashes. No leading slash.
+ path string
+}
+
+func (b *contentNode) rootSection() string {
+ if b.path == "" {
+ return ""
+ }
+ firstSlash := strings.Index(b.path, "/")
+ if firstSlash == -1 {
+ return b.path
+ }
+ return b.path[:firstSlash]
+
+}
+
+type contentTree struct {
+ Name string
+ *radix.Tree
+}
+
+type contentTrees []*contentTree
+
+func (t contentTrees) DeletePrefix(prefix string) int {
+ var count int
+ for _, tree := range t {
+ tree.Walk(func(s string, v interface{}) bool {
+ return false
+ })
+ count += tree.DeletePrefix(prefix)
+ }
+ return count
+}
+
+type contentTreeNodeCallback func(s string, n *contentNode) bool
+
+var (
+ contentTreeNoListFilter = func(s string, n *contentNode) bool {
+ if n.p == nil {
+ return true
+ }
+ return n.p.m.noList()
+ }
+
+ contentTreeNoRenderFilter = func(s string, n *contentNode) bool {
+ if n.p == nil {
+ return true
+ }
+ return n.p.m.noRender()
+ }
+)
+
+func (c *contentTree) WalkPrefixListable(prefix string, fn contentTreeNodeCallback) {
+ c.WalkPrefixFilter(prefix, contentTreeNoListFilter, fn)
+}
+
+func (c *contentTree) WalkPrefixFilter(prefix string, filter, walkFn contentTreeNodeCallback) {
+ c.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+ if filter(s, n) {
+ return false
+ }
+ return walkFn(s, n)
+ })
+}
+
+func (c *contentTree) WalkListable(fn contentTreeNodeCallback) {
+ c.WalkFilter(contentTreeNoListFilter, fn)
+}
+
+func (c *contentTree) WalkFilter(filter, walkFn contentTreeNodeCallback) {
+ c.Walk(func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+ if filter(s, n) {
+ return false
+ }
+ return walkFn(s, n)
+ })
+}
+
+func (c contentTrees) WalkListable(fn contentTreeNodeCallback) {
+ for _, tree := range c {
+ tree.WalkListable(fn)
+ }
+}
+
+func (c contentTrees) WalkRenderable(fn contentTreeNodeCallback) {
+ for _, tree := range c {
+ tree.WalkFilter(contentTreeNoRenderFilter, fn)
+ }
+}
+
+func (c contentTrees) Walk(fn contentTreeNodeCallback) {
+ for _, tree := range c {
+ tree.Walk(func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+ return fn(s, n)
+ })
+ }
+}
+
+func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
+ for _, tree := range c {
+ tree.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ n := v.(*contentNode)
+ return fn(s, n)
+ })
+ }
+}
+
+func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
+ var match string
+ c.Walk(func(s string, v interface{}) bool {
+ n, ok := v.(*contentNode)
+ if !ok {
+ return false
+ }
+
+ if matches(n) {
+ match = s
+ return true
+ }
+
+ return false
+ })
+
+ return match
+}
+
+func (c *contentTree) hasPrefix(s string) bool {
+ var t bool
+ c.Tree.WalkPrefix(s, func(s string, v interface{}) bool {
+ t = true
+ return true
+ })
+ return t
+}
+
+func (c *contentTree) printKeys() {
+ c.Walk(func(s string, v interface{}) bool {
+ fmt.Println(s)
+ return false
+ })
+}
+
+func (c *contentTree) printKeysPrefix(prefix string) {
+ c.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ fmt.Println(s)
+ return false
+ })
+}
+
+// contentTreeRef points to a node in the given tree.
+type contentTreeRef struct {
+ m *pageMap
+ t *contentTree
+ n *contentNode
+ key string
+}
+
+func (c *contentTreeRef) getCurrentSection() (string, *contentNode) {
+ if c.isSection() {
+ return c.key, c.n
+ }
+ return c.getSection()
+}
+
+func (c *contentTreeRef) isSection() bool {
+ return c.t == c.m.sections
+}
+
+func (c *contentTreeRef) getSection() (string, *contentNode) {
+ return c.m.getSection(c.key)
+}
+
+func (c *contentTreeRef) collectPages() page.Pages {
+ var pas page.Pages
+ c.m.collectPages(c.key+cmBranchSeparator, func(c *contentNode) {
+ pas = append(pas, c.p)
+ })
+ page.SortByDefault(pas)
+
+ return pas
+}
+
+func (c *contentTreeRef) collectPagesAndSections() page.Pages {
+ var pas page.Pages
+ c.m.collectPagesAndSections(c.key, func(c *contentNode) {
+ pas = append(pas, c.p)
+ })
+ page.SortByDefault(pas)
+
+ return pas
+}
+
+func (c *contentTreeRef) collectSections() page.Pages {
+ var pas page.Pages
+ c.m.collectSections(c.key, func(c *contentNode) {
+ pas = append(pas, c.p)
+ })
+ page.SortByDefault(pas)
+
+ return pas
+}
+
+type contentTreeReverseIndex struct {
+ t []*contentTree
+ m map[interface{}]*contentNode
+
+ init sync.Once