path: root/hugolib/pagecollections.go
author    Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2019-08-03 17:27:40 +0200
committer Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>  2019-08-08 20:13:39 +0200
commit    7ff0a8ee9fe8d710d407e57faf1fda43bd635f28 (patch)
tree      4baa7d913f735cc1089e465b51ff007014bfe25a /hugolib/pagecollections.go
parent    df374851a0683f1446f33a4afef74c42f7d3eaaf (diff)
Simplify page tree logic
This is preparation for #6041.

For historic reasons, the code for building the section tree and the taxonomies was very much separate. This works, but it makes the code hard to extend and maintain, and it is possibly not as fast as it could be.

This simplification also introduces 3 slightly breaking changes, which I suspect most people will be pleased about; see the referenced issues.

This commit also switches the radix tree dependency to a mutable implementation: github.com/armon/go-radix.

Fixes #6154
Fixes #6153
Fixes #6152
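For reference, a minimal standalone sketch (not part of this commit) of the github.com/armon/go-radix calls the new code leans on: Insert, Walk, LongestPrefix, WalkPrefix and Delete. The keys here are laid out as section paths, the way pagesMap keys its buckets; the string values stand in for the *pagesMapBucket values used in the diff below.

package main

import (
	"fmt"
	"path"

	radix "github.com/armon/go-radix"
)

func main() {
	// A mutable radix tree keyed by section path.
	r := radix.New()
	r.Insert("/", "home bucket")
	r.Insert("/blog", "section bucket")
	r.Insert("/blog/2019", "nested section bucket")

	// Walk visits every key/value pair; returning true from the
	// callback terminates the walk early.
	r.Walk(func(s string, v interface{}) bool {
		fmt.Println(s, "=>", v)
		return false
	})

	// LongestPrefix resolves the closest ancestor key, which is how
	// createWorkAllPages finds a bucket's parent from path.Dir(s).
	if k, v, ok := r.LongestPrefix(path.Dir("/blog/2019")); ok {
		fmt.Println("parent:", k, "=>", v)
	}

	// WalkPrefix visits only the subtree under a prefix; Delete
	// removes a key, mutating the tree in place.
	r.WalkPrefix("/blog", func(s string, v interface{}) bool {
		return false
	})
	r.Delete("/blog/2019")
}

Unlike the immutable tree it replaces, inserts and deletes mutate the tree in place, which is what createWorkAllPages relies on when it prunes empty auto-created sections at the end.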
Diffstat (limited to 'hugolib/pagecollections.go')
-rw-r--r--  hugolib/pagecollections.go  200
1 file changed, 191 insertions(+), 9 deletions(-)
diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go
index aedcf4090..1c8bed9d9 100644
--- a/hugolib/pagecollections.go
+++ b/hugolib/pagecollections.go
@@ -17,8 +17,12 @@ import (
"fmt"
"path"
"path/filepath"
+ "sort"
"strings"
"sync"
+ "time"
+
+ "github.com/gohugoio/hugo/resources/resource"
"github.com/pkg/errors"
@@ -32,6 +36,7 @@ var ambiguityFlag = &pageState{}
// PageCollections contains the page collections for a site.
type PageCollections struct {
+ pagesMap *pagesMap
// Includes absolutely all pages (of all types), including drafts etc.
rawAllPages pageStatePages
@@ -340,15 +345,6 @@ func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStat
return pages
}
-func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
- for _, p := range c.workAllPages {
- if p.Kind() == kind {
- return p
- }
- }
- return nil
-}
-
func (c *PageCollections) addPage(page *pageState) {
c.rawAllPages = append(c.rawAllPages, page)
}
@@ -389,3 +385,189 @@ func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
}
}
+
+func (c *PageCollections) assemblePagesMap(s *Site) error {
+ c.pagesMap = newPagesMap(s)
+
+ rootSections := make(map[string]bool)
+
+ // Add all branch nodes first.
+ for _, p := range c.rawAllPages {
+ rootSections[p.Section()] = true
+ if p.IsPage() {
+ continue
+ }
+ c.pagesMap.addPage(p)
+ }
+
+ // Create missing home page and the first level sections if no
+ // _index provided.
+ s.home = c.pagesMap.getOrCreateHome()
+ for k := range rootSections {
+ c.pagesMap.createSectionIfNotExists(k)
+ }
+
+ // Attach the regular pages to their section.
+ for _, p := range c.rawAllPages {
+ if p.IsNode() {
+ continue
+ }
+ c.pagesMap.addPage(p)
+ }
+
+ return nil
+}
+
+func (c *PageCollections) createWorkAllPages() error {
+ c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages))
+ c.headlessPages = make(pageStatePages, 0)
+
+ var (
+ homeDates *resource.Dates
+ sectionDates *resource.Dates
+ siteLastmod time.Time
+ siteLastDate time.Time
+
+ sectionsParamId = "mainSections"
+ sectionsParamIdLower = strings.ToLower(sectionsParamId)
+ )
+
+ mainSections, mainSectionsFound := c.pagesMap.s.Info.Params()[sectionsParamIdLower]
+
+ var (
+ bucketsToRemove []string
+ rootBuckets []*pagesMapBucket
+ )
+
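+ // Walk every bucket in the radix tree, keyed by section path.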
+ c.pagesMap.r.Walk(func(s string, v interface{}) bool {
+ bucket := v.(*pagesMapBucket)
+ var parentBucket *pagesMapBucket
+
+ if s != "/" {
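+ // The parent bucket is the longest registered prefix of path.Dir(s).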
+ _, parentv, found := c.pagesMap.r.LongestPrefix(path.Dir(s))
+ if !found {
+ panic(fmt.Sprintf("[BUG] parent bucket not found for %q", s))
+ }
+ parentBucket = parentv.(*pagesMapBucket)
+
+ if !mainSectionsFound && strings.Count(s, "/") == 1 {
+ // Root section
+ rootBuckets = append(rootBuckets, bucket)
+ }
+ }
+
+ if bucket.owner.IsHome() {
+ if resource.IsZeroDates(bucket.owner) {
+ // Calculate dates from the page tree.
+ homeDates = &bucket.owner.m.Dates
+ }
+ }
+
+ sectionDates = nil
+ if resource.IsZeroDates(bucket.owner) {
+ sectionDates = &bucket.owner.m.Dates
+ }
+
+ if parentBucket != nil {
+ bucket.parent = parentBucket
+ if bucket.owner.IsSection() {
+ parentBucket.bucketSections = append(parentBucket.bucketSections, bucket)
+ }
+ }
+
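+ // Filter bucket.pages in place, keeping only pages that should be built.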
+ tmp := bucket.pages[:0]
+ for _, x := range bucket.pages {
+ if c.pagesMap.s.shouldBuild(x) {
+ tmp = append(tmp, x)
+ }
+ }
+ bucket.pages = tmp
+
+ if bucket.isEmpty() {
+ if bucket.owner.IsSection() && bucket.owner.File().IsZero() {
+ // Check for any nested section.
+ var hasDescendant bool
+ c.pagesMap.r.WalkPrefix(s, func(ss string, v interface{}) bool {
+ if s != ss {
+ hasDescendant = true
+ return true
+ }
+ return false
+ })
+ if !hasDescendant {
+ // This is an auto-created section that is now empty, so mark it for removal.
+ bucketsToRemove = append(bucketsToRemove, s)
+ return false
+ }
+ }
+ }
+
+ if !bucket.disabled {
+ c.workAllPages = append(c.workAllPages, bucket.owner)
+ }
+
+ if !bucket.view {
+ for _, p := range bucket.pages {
+ ps := p.(*pageState)
+ ps.parent = bucket.owner
+ if ps.m.headless {
+ c.headlessPages = append(c.headlessPages, ps)
+ } else {
+ c.workAllPages = append(c.workAllPages, ps)
+ }
+
+ if homeDates != nil {
+ homeDates.UpdateDateAndLastmodIfAfter(ps)
+ }
+
+ if sectionDates != nil {
+ sectionDates.UpdateDateAndLastmodIfAfter(ps)
+ }
+
+ if p.Lastmod().After(siteLastmod) {
+ siteLastmod = p.Lastmod()
+ }
+ if p.Date().After(siteLastDate) {
+ siteLastDate = p.Date()
+ }
+ }
+ }
+
+ return false
+ })
+
+ c.pagesMap.s.lastmod = siteLastmod
+
+ if !mainSectionsFound {
+
+ // Calculate the main section.
+ var (
+ maxRootBucketWeight int
+ maxRootBucket *pagesMapBucket
+ )
+
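+ // Weigh each root section: every page counts 1, every child section counts 5.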
+ for _, b := range rootBuckets {
+ weight := len(b.pages) + (len(b.bucketSections) * 5)
+ if weight >= maxRootBucketWeight {
+ maxRootBucket = b
+ maxRootBucketWeight = weight
+ }
+ }
+
+ if maxRootBucket != nil {
+ // Try to make this as backwards compatible as possible.
+ mainSections = []string{maxRootBucket.owner.Section()}
+ }
+ }
+
+ c.pagesMap.s.Info.Params()[sectionsParamId] = mainSections
+ c.pagesMap.s.Info.Params()[sectionsParamIdLower] = mainSections
+
+ for _, key := range bucketsToRemove {
+ c.pagesMap.r.Delete(key)
+ }
+
+ sort.Sort(c.workAllPages)
+
+ return nil
+}