authorBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2020-05-21 11:25:00 +0200
committerBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2020-05-24 12:35:45 +0200
commita985efcecf44afe1d252690ec0a00cf077974f44 (patch)
tree792e19ede943310f091977edb36962d1eee596f9
parent6c3c6686f5d3c7155e2d455b07ac8ab70f42cb88 (diff)
Fix GetPage on section/bundle name overlaps
In the internal Radix tree we stored the directory-based nodes without a trailing slash, e.g. `/blog`. The original motivation was probably to make prefix searching easy: give me all ancestors. This has, however, led to ambiguity with overlapping directory names, and that particular problem was not possible to work around in any easy way, so from now on we store these keys as `/blog/`.

Fixes #7301
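To see why the trailing slash matters, here is a minimal sketch against the armon/go-radix tree this map is built on (the section names are made up for illustration): without the trailing slash, a prefix walk for one section can leak into a sibling whose name merely starts with the same characters.

```go
package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	// Without trailing slashes, "/blog" is a string prefix of the unrelated
	// section "/blog-posts", so a prefix walk for "/blog" picks up both.
	ambiguous := radix.New()
	ambiguous.Insert("/blog", true)
	ambiguous.Insert("/blog-posts", true)
	ambiguous.WalkPrefix("/blog", func(s string, v interface{}) bool {
		fmt.Println("ambiguous match:", s) // prints /blog and /blog-posts
		return false
	})

	// With trailing slashes, "/blog/" is not a prefix of "/blog-posts/",
	// so the walk stays inside the intended section.
	unambiguous := radix.New()
	unambiguous.Insert("/blog/", true)
	unambiguous.Insert("/blog-posts/", true)
	unambiguous.WalkPrefix("/blog/", func(s string, v interface{}) bool {
		fmt.Println("unambiguous match:", s) // prints only /blog/
		return false
	})
}
```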
-rw-r--r--  helpers/path.go                    9
-rw-r--r--  htesting/hqt/checkers.go           2
-rw-r--r--  hugolib/content_map.go           161
-rw-r--r--  hugolib/content_map_page.go       69
-rw-r--r--  hugolib/content_map_test.go       86
-rw-r--r--  hugolib/page.go                   19
-rw-r--r--  hugolib/page__tree.go              6
-rw-r--r--  hugolib/pagecollections.go        14
-rw-r--r--  hugolib/pagecollections_test.go    6
-rw-r--r--  hugolib/taxonomy_test.go           1
10 files changed, 211 insertions(+), 162 deletions(-)
diff --git a/helpers/path.go b/helpers/path.go
index 29e1e6071..01c452607 100644
--- a/helpers/path.go
+++ b/helpers/path.go
@@ -665,3 +665,12 @@ func FileContainsAny(filename string, subslices [][]byte, fs afero.Fs) (bool, er
func Exists(path string, fs afero.Fs) (bool, error) {
return afero.Exists(fs, path)
}
+
+// AddTrailingSlash adds a trailing Unix styled slash (/) if not already
+// there.
+func AddTrailingSlash(path string) string {
+ if !strings.HasSuffix(path, "/") {
+ path += "/"
+ }
+ return path
+}
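A quick illustrative check of the new helper (not part of this commit; it just restates the behavior of AddTrailingSlash above):

```go
package helpers

import "testing"

// Illustrative sketch of AddTrailingSlash's behavior.
func TestAddTrailingSlashSketch(t *testing.T) {
	cases := map[string]string{
		"/blog":  "/blog/", // slash appended
		"/blog/": "/blog/", // already terminated, left as-is
		"":       "/",      // empty path becomes "/"
	}
	for in, want := range cases {
		if got := AddTrailingSlash(in); got != want {
			t.Errorf("AddTrailingSlash(%q) = %q, want %q", in, got, want)
		}
	}
}
```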
diff --git a/htesting/hqt/checkers.go b/htesting/hqt/checkers.go
index 6fb65ee47..c12f78034 100644
--- a/htesting/hqt/checkers.go
+++ b/htesting/hqt/checkers.go
@@ -77,7 +77,7 @@ func (c *stringChecker) Check(got interface{}, args []interface{}, note func(key
return nil
}
- return fmt.Errorf("values are not the same text: %s", htesting.DiffStrings(s1, s2))
+ return fmt.Errorf("values are not the same text: %s", strings.Join(htesting.DiffStrings(s1, s2), " | "))
}
func normalizeString(s string) string {
diff --git a/hugolib/content_map.go b/hugolib/content_map.go
index 3c57fffcf..ddcc70707 100644
--- a/hugolib/content_map.go
+++ b/hugolib/content_map.go
@@ -20,6 +20,8 @@ import (
"strings"
"sync"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/resources/page"
"github.com/pkg/errors"
@@ -31,27 +33,26 @@ import (
)
// We store the branch nodes in either the `sections` or `taxonomies` tree
-// with their path as a key; Unix style slashes, a leading slash but no
-// trailing slash.
+// with their path as a key; Unix style slashes, a leading and trailing slash.
//
-// E.g. "/blog" or "/categories/funny"
+// E.g. "/blog/" or "/categories/funny/"
//
// Pages that belongs to a section are stored in the `pages` tree below
-// the section name and a branch separator, e.g. "/blog__hb_". A page is
+// the section name and a branch separator, e.g. "/blog/__hb_". A page is
// given a key using the path below the section and the base filename with no extension
// with a leaf separator added.
//
// For bundled pages (/mybundle/index.md), we use the folder name.
//
-// An exmple of a full page key would be "/blog__hb_/page1__hl_"
+// An example of a full page key would be "/blog/__hb_page1__hl_"
//
// Bundled resources are stored in the `resources` having their path prefixed
// with the bundle they belong to, e.g.
-// "/blog__hb_/bundle__hl_data.json".
+// "/blog/__hb_bundle__hl_data.json".
//
// The weighted taxonomy entries extracted from page front matter are stored in
// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
-// "/categories/funny/blog__hb_/bundle__hl_".
+// "/categories/funny/blog/__hb_bundle__hl_".
const (
cmBranchSeparator = "__hb_"
cmLeafSeparator = "__hl_"
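Putting the scheme together, a standalone sketch of how the keys described in the comment above are composed (the separators are the real constants; the section, bundle, and file names are just examples):

```go
package main

import "fmt"

const (
	cmBranchSeparator = "__hb_"
	cmLeafSeparator   = "__hl_"
)

func main() {
	section := "/blog/" // branch key: leading and trailing slash

	// Regular page content/blog/page1.md:
	pageKey := section + cmBranchSeparator + "page1" + cmLeafSeparator
	fmt.Println(pageKey) // /blog/__hb_page1__hl_

	// Resource data.json inside the bundle content/blog/bundle/index.md:
	resourceKey := section + cmBranchSeparator + "bundle" + cmLeafSeparator + "data.json"
	fmt.Println(resourceKey) // /blog/__hb_bundle__hl_data.json

	// Weighted taxonomy entry for that bundle under categories/funny:
	entryKey := "/categories/funny/" + "blog/" + cmBranchSeparator + "bundle" + cmLeafSeparator
	fmt.Println(entryKey) // /categories/funny/blog/__hb_bundle__hl_
}
```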
@@ -105,7 +106,7 @@ func newContentMap(cfg contentMapConfig) *contentMap {
addToReverseMap(mountKey, n, m)
}
}
- k := strings.TrimSuffix(path.Base(s), cmLeafSeparator)
+ k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator)
addToReverseMap(k, n, m)
return false
})
@@ -127,18 +128,15 @@ type cmInsertKeyBuilder struct {
}
func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
- // TODO2 fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
+ //fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
baseKey := b.baseKey
b.baseKey = s
- if !strings.HasPrefix(s, "/") {
- s = "/" + s
- }
-
if baseKey != "/" {
// Don't repeat the section path in the key.
s = strings.TrimPrefix(s, baseKey)
}
+ s = strings.TrimPrefix(s, "/")
switch b.tree {
case b.m.sections:
@@ -154,10 +152,10 @@ func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
}
func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
- // TODO2 fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
+ //fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
- s = strings.TrimPrefix(s, "/")
- s = strings.TrimPrefix(s, strings.TrimPrefix(b.baseKey, "/")+"/")
+ baseKey := helpers.AddTrailingSlash(b.baseKey)
+ s = strings.TrimPrefix(s, baseKey)
switch b.tree {
case b.m.pages:
@@ -173,14 +171,23 @@ func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
if b.err == nil {
- b.tree.Insert(cleanTreeKey(b.key), n)
+ b.tree.Insert(b.Key(), n)
}
return b
}
+func (b *cmInsertKeyBuilder) Key() string {
+ switch b.tree {
+ case b.m.sections, b.m.taxonomies:
+ return cleanSectionTreeKey(b.key)
+ default:
+ return cleanTreeKey(b.key)
+ }
+}
+
func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
if b.err == nil {
- b.tree.DeletePrefix(cleanTreeKey(b.key))
+ b.tree.DeletePrefix(b.Key())
}
return b
}
@@ -211,15 +218,16 @@ func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilde
}
func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
+ s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.sections
b.baseKey = s
b.key = s
- // TODO2 fmt.Println("WithSection:", s, "baseKey:", b.baseKey, "key:", b.key)
return b
}
func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
+ s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.taxonomies
b.baseKey = s
@@ -233,20 +241,17 @@ func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
m := b.m
section, _ := m.getSection(s)
- p := s
- if section != "/" {
- p = strings.TrimPrefix(s, section)
- }
+ p := strings.TrimPrefix(s, section)
- bundlePathParts := strings.Split(p, "/")[1:]
+ bundlePathParts := strings.Split(p, "/")
basePath := section + cmBranchSeparator
// Put it into an existing bundle if found.
for i := len(bundlePathParts) - 2; i >= 0; i-- {
bundlePath := path.Join(bundlePathParts[:i]...)
- searchKey := basePath + "/" + bundlePath + cmLeafSeparator
+ searchKey := basePath + bundlePath + cmLeafSeparator
if _, found := m.pages.Get(searchKey); found {
- return section + "/" + bundlePath, searchKey
+ return section + bundlePath, searchKey
}
}
@@ -432,7 +437,7 @@ func (m *contentMap) CreateMissingNodes() error {
sectionPath = sectionPath[:firstSlash]
}
}
- sect = cleanTreeKey(sect)
+ sect = cleanSectionTreeKey(sect)
_, found := m.sections.Get(sect)
if !found {
m.sections.Insert(sect, &contentNode{path: sectionPath})
@@ -440,7 +445,7 @@ func (m *contentMap) CreateMissingNodes() error {
}
for _, view := range m.cfg.taxonomyConfig {
- s := cleanTreeKey(view.plural)
+ s := cleanSectionTreeKey(view.plural)
_, found := m.taxonomies.Get(s)
if !found {
b := &contentNode{
@@ -476,15 +481,20 @@ func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
}
func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
+ s = helpers.AddTrailingSlash(s)
for {
k, v, found := m.sections.LongestPrefix(s)
+
if !found {
return "", nil
}
- if strings.Count(k, "/") == 1 {
+
+ if strings.Count(k, "/") <= 2 {
return k, v.(*contentNode)
}
- s = path.Dir(s)
+
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+
}
}
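The `<= 2` check follows from the new key shape: every section key now carries both a leading and a trailing slash, so the slash count encodes the depth. A tiny illustration:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// With the trailing slash, a key's depth is its slash count minus one:
	// "/" is home, two slashes is a root section, three is one level down.
	for _, k := range []string{"/", "/blog/", "/blog/sub/"} {
		fmt.Println(k, strings.Count(k, "/"))
	}
	// Output:
	// / 1
	// /blog/ 2
	// /blog/sub/ 3
}
```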
@@ -507,10 +517,7 @@ func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *cont
}
if mustCreate {
- k = s[:strings.Index(s[1:], "/")+1]
- if k == "" {
- k = "/"
- }
+ k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1])
b = &contentNode{
path: n.rootSection(),
@@ -523,7 +530,9 @@ func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *cont
}
func (m *contentMap) getPage(section, name string) *contentNode {
- key := section + cmBranchSeparator + "/" + name + cmLeafSeparator
+ section = helpers.AddTrailingSlash(section)
+ key := section + cmBranchSeparator + name + cmLeafSeparator
+
v, found := m.pages.Get(key)
if found {
return v.(*contentNode)
@@ -532,7 +541,9 @@ func (m *contentMap) getPage(section, name string) *contentNode {
}
func (m *contentMap) getSection(s string) (string, *contentNode) {
- k, v, found := m.sections.LongestPrefix(path.Dir(s))
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+
+ k, v, found := m.sections.LongestPrefix(s)
if found {
return k, v.(*contentNode)
@@ -541,21 +552,18 @@ func (m *contentMap) getSection(s string) (string, *contentNode) {
}
func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
- s = path.Dir(s)
- if s == "/" {
- v, found := m.sections.Get(s)
- if found {
- return s, v.(*contentNode)
- }
- return "", nil
+ s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
+ k, v, found := m.taxonomies.LongestPrefix(s)
+
+ if found {
+ return k, v.(*contentNode)
}
- for _, tree := range []*contentTree{m.taxonomies, m.sections} {
- k, v, found := tree.LongestPrefix(s)
- if found {
- return k, v.(*contentNode)
- }
+ v, found = m.sections.Get("/")
+ if found {
+ return s, v.(*contentNode)
}
+
return "", nil
}
@@ -569,6 +577,15 @@ func cleanTreeKey(k string) string {
return k
}
+func cleanSectionTreeKey(k string) string {
+ k = cleanTreeKey(k)
+ if k != "/" {
+ k += "/"
+ }
+
+ return k
+}
+
func (m *contentMap) onSameLevel(s1, s2 string) bool {
return strings.Count(s1, "/") == strings.Count(s2, "/")
}
@@ -606,13 +623,13 @@ func (m *contentMap) deleteOrphanSections() {
return false
}
- if s == "/" || strings.Count(s, "/") > 1 {
+ if s == "/" || strings.Count(s, "/") > 2 {
return false
}
prefixBundle := s + cmBranchSeparator
- if !(m.sections.hasPrefix(s+"/") || m.pages.hasPrefix(prefixBundle) || m.resources.hasPrefix(prefixBundle)) {
+ if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) {
sectionsToDelete = append(sectionsToDelete, s)
}
@@ -630,13 +647,15 @@ func (m *contentMap) deletePage(s string) {
}
func (m *contentMap) deleteSectionByPath(s string) {
- m.sections.Delete(s)
- m.sections.DeletePrefix(s + "/")
- m.pages.DeletePrefix(s + cmBranchSeparator)
- m.pages.DeletePrefix(s + "/")
- m.resources.DeletePrefix(s + cmBranchSeparator)
- m.resources.DeletePrefix(s + cmLeafSeparator)
- m.resources.DeletePrefix(s + "/")
+ if !strings.HasSuffix(s, "/") {
+ panic("section must end with a slash")
+ }
+ if !strings.HasPrefix(s, "/") {
+ panic("section must start with a slash")
+ }
+ m.sections.DeletePrefix(s)
+ m.pages.DeletePrefix(s)
+ m.resources.DeletePrefix(s)
}
func (m *contentMap) deletePageByPath(s string) {
@@ -648,8 +667,7 @@ func (m *contentMap) deletePageByPath(s string) {
}
func (m *contentMap) deleteTaxonomy(s string) {
- m.taxonomies.Delete(s)
- m.taxonomies.DeletePrefix(s + "/")
+ m.taxonomies.DeletePrefix(s)
}
func (m *contentMap) reduceKeyPart(dir, filename string) string {
@@ -817,7 +835,7 @@ func (c *contentTree) WalkQuery(query pageMapQuery, walkFn contentTreeNodeCallba
filter = contentTreeNoListAlwaysFilter
}
if query.Prefix != "" {
- c.WalkPrefix(query.Prefix, func(s string, v interface{}) bool {
+ c.WalkBelow(query.Prefix, func(s string, v interface{}) bool {
n := v.(*contentNode)
if filter != nil && filter(s, n) {
return false
@@ -862,6 +880,18 @@ func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
}
}
+// WalkBelow walks the tree below the given prefix, i.e. it skips the
+// node with the given prefix as key.
+func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) {
+ c.Tree.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ if s == prefix {
+ return false
+ }
+ return fn(s, v)
+ })
+
+}
+
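A standalone sketch of the same idea against a plain go-radix tree, showing what skipping the prefix node buys compared to WalkPrefix (the keys are illustrative):

```go
package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

// walkBelow mirrors contentTree.WalkBelow: walk everything under prefix,
// but skip the node whose key is exactly the prefix.
func walkBelow(t *radix.Tree, prefix string, fn radix.WalkFn) {
	t.WalkPrefix(prefix, func(s string, v interface{}) bool {
		if s == prefix {
			return false
		}
		return fn(s, v)
	})
}

func main() {
	t := radix.New()
	t.Insert("/blog/", "section")
	t.Insert("/blog/sub/", "nested section")

	t.WalkPrefix("/blog/", func(s string, v interface{}) bool {
		fmt.Println("WalkPrefix:", s) // /blog/ and /blog/sub/
		return false
	})
	walkBelow(t, "/blog/", func(s string, v interface{}) bool {
		fmt.Println("walkBelow:", s) // only /blog/sub/
		return false
	})
}
```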
func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
var match string
c.Walk(func(s string, v interface{}) bool {
@@ -881,9 +911,9 @@ func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
return match
}
-func (c *contentTree) hasPrefix(s string) bool {
+func (c *contentTree) hasBelow(s1 string) bool {
var t bool
- c.Tree.WalkPrefix(s, func(s string, v interface{}) bool {
+ c.WalkBelow(s1, func(s2 string, v interface{}) bool {
t = true
return true
})
@@ -953,12 +983,7 @@ func (c *contentTreeRef) getPagesRecursive() page.Pages {
Filter: c.n.p.m.getListFilter(true),
}
- query.Prefix = c.key + cmBranchSeparator
- c.m.collectPages(query, func(c *contentNode) {
- pas = append(pas, c.p)
- })
-
- query.Prefix = c.key + "/"
+ query.Prefix = c.key
c.m.collectPages(query, func(c *contentNode) {
pas = append(pas, c.p)
})
diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go
index 6be56f3f5..7516a7029 100644
--- a/hugolib/content_map_page.go
+++ b/hugolib/content_map_page.go
@@ -70,7 +70,7 @@ func (m *pageMap) createMissingTaxonomyNodes() error {
m.taxonomyEntries.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
vi := n.viewInfo
- k := cleanTreeKey(vi.name.plural + "/" + vi.termKey)
+ k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)
if _, found := m.taxonomies.Get(k); !found {
vic := &contentBundleViewInfo{
@@ -266,6 +266,7 @@ func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resour
func (m *pageMap) createSiteTaxonomies() error {
m.s.taxonomies = make(TaxonomyList)
+ var walkErr error
m.taxonomies.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
t := n.viewInfo
@@ -276,7 +277,11 @@ func (m *pageMap) createSiteTaxonomies() error {
m.s.taxonomies[viewName.plural] = make(Taxonomy)
} else {
taxonomy := m.s.taxonomies[viewName.plural]
- m.taxonomyEntries.WalkPrefix(s+"/", func(ss string, v interface{}) bool {
+ if taxonomy == nil {
+ walkErr = errors.Errorf("missing taxonomy: %s", viewName.plural)
+ return true
+ }
+ m.taxonomyEntries.WalkPrefix(s, func(ss string, v interface{}) bool {
b2 := v.(*contentNode)
info := b2.viewInfo
taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p))
@@ -294,7 +299,7 @@ func (m *pageMap) createSiteTaxonomies() error {
}
}
- return nil
+ return walkErr
}
func (m *pageMap) createListAllPages() page.Pages {
@@ -426,7 +431,6 @@ func (m *pageMap) assembleSections() error {
m.sections.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
-
var shouldBuild bool
defer func() {
@@ -596,11 +600,12 @@ func (m *pageMap) attachPageToViews(s string, b *contentNode) {
},
}
- if s == "/" {
- // To avoid getting an empty key.
- s = page.KindHome
+ var key string
+ if strings.HasSuffix(s, "/") {
+ key = cleanSectionTreeKey(path.Join(viewName.plural, termKey, s))
+ } else {
+ key = cleanTreeKey(path.Join(viewName.plural, termKey, s))
}
- key := cleanTreeKey(path.Join(viewName.plural, termKey, s))
m.taxonomyEntries.Insert(key, bv)
}
}
@@ -638,19 +643,10 @@ func (m *pageMap) collectPagesAndSections(query pageMapQuery, fn func(c *content
}
func (m *pageMap) collectSections(query pageMapQuery, fn func(c *contentNode)) error {
- var level int
- isHome := query.Prefix == "/"
-
- if !isHome {
- level = strings.Count(query.Prefix, "/")
- }
+ level := strings.Count(query.Prefix, "/")
return m.collectSectionsFn(query, func(s string, c *contentNode) bool {
- if s == query.Prefix {
- return false
- }
-
- if (strings.Count(s, "/") - level) != 1 {
+ if strings.Count(s, "/") != level+1 {
return false
}
@@ -745,10 +741,11 @@ func (m *pageMaps) AssemblePages() error {
return err
}
- a := (&sectionWalker{m: pm.contentMap}).applyAggregates()
+ sw := &sectionWalker{m: pm.contentMap}
+ a := sw.applyAggregates()
_, mainSectionsSet := pm.s.s.Info.Params()["mainsections"]
if !mainSectionsSet && a.mainSection != "" {
- mainSections := []string{a.mainSection}
+ mainSections := []string{strings.TrimRight(a.mainSection, "/")}
pm.s.s.Info.Params()["mainSections"] = mainSections
pm.s.s.Info.Params()["mainsections"] = mainSections
}
@@ -847,7 +844,7 @@ func (b *pagesMapBucket) getTaxonomies() page.Pages {
b.sectionsInit.Do(func() {
var pas page.Pages
ref := b.owner.treeRef
- ref.m.collectTaxonomies(ref.key+"/", func(c *contentNode) {
+ ref.m.collectTaxonomies(ref.key, func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
@@ -888,8 +885,12 @@ type sectionAggregateHandler struct {
s string
}
+func (h *sectionAggregateHandler) String() string {
+ return fmt.Sprintf("%s/%s - %d - %s", h.sectionAggregate.datesAll, h.sectionAggregate.datesSection, h.sectionPageCount, h.s)
+}
+
func (h *sectionAggregateHandler) isRootSection() bool {
- return h.s != "/" && strings.Count(h.s, "/") == 1
+ return h.s != "/" && strings.Count(h.s, "/") == 2
}
func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error {
@@ -963,11 +964,13 @@ func (w *sectionWalker) applyAggregates() *sectionAggregateHandler {
func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler {
level := strings.Count(prefix, "/")
+
visitor := createVisitor()
- w.m.taxonomies.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ w.m.taxonomies.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
- if currentLevel > level {
+
+ if currentLevel > level+1 {
return false
}
@@ -977,8 +980,8 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return true
}
- if currentLevel == 1 {
- nested := w.walkLevel(s+"/", createVisitor)
+ if currentLevel == 2 {
+ nested := w.walkLevel(s, createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
@@ -995,9 +998,9 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return w.err != nil
})
- w.m.sections.WalkPrefix(prefix, func(s string, v interface{}) bool {
+ w.m.sections.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
- if currentLevel > level {
+ if currentLevel > level+1 {
return false
}
@@ -1016,11 +1019,9 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return true
}
- if s != "/" {
- nested := w.walkLevel(s+"/", createVisitor)
- if w.err = visitor.handleNested(nested); w.err != nil {
- return true
- }
+ nested := w.walkLevel(s, createVisitor)
+ if w.err = visitor.handleNested(nested); w.err != nil {
+ return true
}
w.err = visitor.handleSectionPost()
diff --git a/hugolib/content_map_test.go b/hugolib/content_map_test.go
index 4ddbf8c7f..9ec30201a 100644
--- a/hugolib/content_map_test.go
+++ b/hugolib/content_map_test.go
@@ -155,19 +155,19 @@ func TestContentMap(t *testing.T) {
expect := `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_logo.png
- /blog__hl_sectiondata.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hl_sectiondata.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
- en/sections/blog|f:blog/_index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -194,24 +194,24 @@ func TestContentMap(t *testing.T) {
expect = `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_b/data2.json
- /blog__hb_/a__hl_logo.png
- /blog__hb_/b/c__hl_d/data3.json
- /blog__hl_sectiondata.json
- /blog__hl_sectiondata2.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
- en/sections/blog|f:blog/_index.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -226,26 +226,26 @@ func TestContentMap(t *testing.T) {
c.Assert(m.testDump(), hqt.IsSameString, `
Tree 0:
- /blog__hb_/a__hl_
- /blog__hb_/b/c__hl_
- /blog__hb_/b__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
+ /blog/__hb_b__hl_
Tree 1:
- /blog
+ /blog/
Tree 2:
- /blog__hb_/a__hl_b/data.json
- /blog__hb_/a__hl_b/data2.json
- /blog__hb_/a__hl_logo.png
- /blog__hb_/b/c__hl_d/data3.json
- /blog__hl_sectiondata.json
- /blog__hl_sectiondata2.json
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
- en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
- en/pages/blog__hb_/b__hl_|f:blog/b.md
- en/sections/blog|f:blog/_index.md
+ en/pages/blog/__hb_b__hl_|f:blog/b.md
+ en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- P: blog/b.md
@@ -280,19 +280,19 @@ func TestContentMap(t *testing.T) {
c.Assert(got, hqt.IsSameString, `
Tree 0:
- /__hb_/bundle__hl_
- /blog__hb_/a__hl_
- /blog__hb_/page__hl_
+ /__hb_bundle__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_page__hl_
Tree 1:
/
- /blog
+ /blog/
Tree 2:
- en/pages/__hb_/bundle__hl_|f:bundle/index.md
- en/pages/blog__hb_/a__hl_|f:blog/a/index.md
- en/pages/blog__hb_/page__hl_|f:blog/page.md
+ en/pages/__hb_bundle__hl_|f:bundle/index.md
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ en/pages/blog/__hb_page__hl_|f:blog/page.md
en/sections/
- P: bundle/index.md
- en/sections/blog
+ en/sections/blog/
- P: blog/a/index.md
- P: blog/page.md
diff --git a/hugolib/page.go b/hugolib/page.go
index bd518c1e1..dbcc31236 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -133,22 +133,21 @@ func (p *pageState) GitInfo() *gitmap.GitInfo {
// GetTerms gets the terms defined on this page in the given taxonomy.
func (p *pageState) GetTerms(taxonomy string) page.Pages {
- taxonomy = strings.ToLower(taxonomy)
+ if p.treeRef == nil {
+ return nil
+ }
+
m := p.s.pageMap
- prefix := cleanTreeKey(taxonomy)
- var self string
- if p.IsHome() {
- // TODO(bep) make this less magical, see taxonomyEntries.Insert.
- self = "/" + page.KindHome
- } else if p.treeRef != nil {
- self = p.treeRef.key
- }
+ taxonomy = strings.ToLower(taxonomy)
+ prefix := cleanSectionTreeKey(taxonomy)
+ self := strings.TrimPrefix(p.treeRef.key, "/")
var pas page.Pages
m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool {
- if _, found := m.taxonomyEntries.Get(s + self); found {
+ key := s + self
+ if _, found := m.taxonomyEntries.Get(key); found {
pas = append(pas, n.p)
}
return false
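The lookup key here is simply the term's tree key followed by the page's own key without its leading slash, matching the taxonomyEntries format documented in content_map.go. A worked example (values are illustrative):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Term node key as stored in the taxonomies tree:
	term := "/categories/funny/"
	// The page's own tree key, with its leading slash trimmed:
	self := strings.TrimPrefix("/blog/__hb_bundle__hl_", "/")

	// Concatenating the two yields the taxonomyEntries key format:
	fmt.Println(term + self) // /categories/funny/blog/__hb_bundle__hl_
}
```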
diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go
index a6f66ffbc..5b53dc4cd 100644
--- a/hugolib/page__tree.go
+++ b/hugolib/page__tree.go
@@ -58,7 +58,7 @@ func (pt pageTree) IsAncestor(other interface{}) (bool, error) {
return true, nil
}
- if strings.HasPrefix(ref2.key, ref1.key+"/") {
+ if strings.HasPrefix(ref2.key, ref1.key) {
return true, nil
}
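With the trailing slash baked into section keys, the bare HasPrefix check no longer confuses siblings with overlapping names, which is what the removed `+ "/"` used to guard against. A small illustration (keys made up):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Old-style keys: "/blog" is (wrongly) a prefix of the sibling "/blog-posts".
	fmt.Println(strings.HasPrefix("/blog-posts", "/blog")) // true

	// New-style keys: the trailing slash keeps siblings apart, so appending
	// "/" before the prefix check is no longer necessary.
	fmt.Println(strings.HasPrefix("/blog-posts/", "/blog/"))         // false
	fmt.Println(strings.HasPrefix("/blog/__hb_page__hl_", "/blog/")) // true
}
```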
@@ -109,7 +109,7 @@ func (pt pageTree) IsDescendant(other interface{}) (bool, error) {
return true, nil
}
- if strings.HasPrefix(ref1.key, ref2.key+"/") {
+ if strings.HasPrefix(ref1.key, ref2.key) {
return true, nil
}
@@ -123,9 +123,11 @@ func (pt pageTree) FirstSection() page.Page {
return pt.p.s.home
}
key := ref.key
+
if !ref.isSection() {
key = path.Dir(key)
}
+
_, b := ref.m.getFirstSection(key)
if b == nil {
return nil
diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go
index 7cdac7453..49378452f 100644
--- a/hugolib/pagecoll