summaryrefslogtreecommitdiffstats
path: root/hugolib/hugo_sites.go
diff options
context:
space:
mode:
authorBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2019-01-02 12:33:26 +0100
committerBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2019-03-23 18:51:22 +0100
commit597e418cb02883418f2cebb41400e8e61413f651 (patch)
tree177ad9c540b2583b6dab138c9f0490d28989c7f7 /hugolib/hugo_sites.go
parent44f5c1c14cb1f42cc5f01739c289e9cfc83602af (diff)
Make Page an interface
The main motivation of this commit is to add a `page.Page` interface to replace the very file-oriented `hugolib.Page` struct. This is all a preparation step for issue #5074, "pages from other data sources". But this also fixes a set of annoying limitations, especially related to custom output formats, and shortcodes. Most notable changes: * The inner content of shortcodes using the `{{%` as the outer-most delimiter will now be sent to the content renderer, e.g. Blackfriday. This means that any markdown will partake in the global ToC and footnote context etc. * The Custom Output formats are now "fully virtualized". This removes many of the current limitations. * The taxonomy list type now has a reference to the `Page` object. This improves the taxonomy template `.Title` situation and make common template constructs much simpler. See #5074 Fixes #5763 Fixes #5758 Fixes #5090 Fixes #5204 Fixes #4695 Fixes #5607 Fixes #5707 Fixes #5719 Fixes #3113 Fixes #5706 Fixes #5767 Fixes #5723 Fixes #5769 Fixes #5770 Fixes #5771 Fixes #5759 Fixes #5776 Fixes #5777 Fixes #5778
Diffstat (limited to 'hugolib/hugo_sites.go')
-rw-r--r--hugolib/hugo_sites.go523
1 files changed, 346 insertions, 177 deletions
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index 9ce1c438e..af1e0fbac 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -14,14 +14,24 @@
package hugolib
import (
- "errors"
"io"
"path/filepath"
"sort"
"strings"
"sync"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/pkg/errors"
+
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/bep/gitmap"
"github.com/gohugoio/hugo/config"
+ "github.com/spf13/afero"
"github.com/gohugoio/hugo/publisher"
@@ -30,8 +40,10 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/i18n"
+ "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/tpl/tplimpl"
)
@@ -48,17 +60,96 @@ type HugoSites struct {
// If this is running in the dev server.
running bool
+ // Render output formats for all sites.
+ renderFormats output.Formats
+
*deps.Deps
+ gitInfo *gitInfo
+
+ // As loaded from the /data dirs
+ data map[string]interface{}
+
// Keeps track of bundle directories and symlinks to enable partial rebuilding.
ContentChanges *contentChangeMap
- // If enabled, keeps a revision map for all content.
- gitInfo *gitInfo
+ init *hugoSitesInit
+
+ *fatalErrorHandler
+}
+
+type fatalErrorHandler struct {
+ mu sync.Mutex
+
+ h *HugoSites
+
+ err error
+
+ done bool
+ donec chan bool // will be closed when done
+}
+
+// FatalError is used in some rare situations where it does not make sense to
+// continue processing, to abort as soon as possible and log the error.
+func (f *fatalErrorHandler) FatalError(err error) {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+ if !f.done {
+ f.done = true
+ close(f.donec)
+ }
+ f.err = err
}
-func (h *HugoSites) siteInfos() SiteInfos {
- infos := make(SiteInfos, len(h.Sites))
+func (f *fatalErrorHandler) getErr() error {
+ f.mu.Lock()
+ defer f.mu.Unlock()
+ return f.err
+}
+
+func (f *fatalErrorHandler) Done() <-chan bool {
+ return f.donec
+}
+
+type hugoSitesInit struct {
+ // Loads the data from all of the /data folders.
+ data *lazy.Init
+
+ // Loads the Git info for all the pages if enabled.
+ gitInfo *lazy.Init
+
+ // Maps page translations.
+ translations *lazy.Init
+}
+
+func (h *hugoSitesInit) Reset() {
+ h.data.Reset()
+ h.gitInfo.Reset()
+ h.translations.Reset()
+}
+
+func (h *HugoSites) Data() map[string]interface{} {
+ if _, err := h.init.data.Do(); err != nil {
+ h.SendError(errors.Wrap(err, "failed to load data"))
+ return nil
+ }
+ return h.data
+}
+
+func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
+ if _, err := h.init.gitInfo.Do(); err != nil {
+ return nil, err
+ }
+
+ if h.gitInfo == nil {
+ return nil, nil
+ }
+
+ return h.gitInfo.forPage(p), nil
+}
+
+func (h *HugoSites) siteInfos() page.Sites {
+ infos := make(page.Sites, len(h.Sites))
for i, site := range h.Sites {
infos[i] = &site.Info
}
@@ -106,7 +197,7 @@ func (h *HugoSites) IsMultihost() bool {
func (h *HugoSites) LanguageSet() map[string]bool {
set := make(map[string]bool)
for _, s := range h.Sites {
- set[s.Language.Lang] = true
+ set[s.language.Lang] = true
}
return set
}
@@ -129,14 +220,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
func (h *HugoSites) langSite() map[string]*Site {
m := make(map[string]*Site)
for _, s := range h.Sites {
- m[s.Language.Lang] = s
+ m[s.language.Lang] = s
}
return m
}
// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
-func (h *HugoSites) GetContentPage(filename string) *Page {
+func (h *HugoSites) GetContentPage(filename string) page.Page {
for _, s := range h.Sites {
pos := s.rawAllPages.findPagePosByFilename(filename)
if pos == -1 {
@@ -178,10 +269,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
running: cfg.Running,
multilingual: langConfig,
multihost: cfg.Cfg.GetBool("multihost"),
- Sites: sites}
+ Sites: sites,
+ init: &hugoSitesInit{
+ data: lazy.New(),
+ gitInfo: lazy.New(),
+ translations: lazy.New(),
+ },
+ }
+
+ h.fatalErrorHandler = &fatalErrorHandler{
+ h: h,
+ donec: make(chan bool),
+ }
+
+	h.init.data.Add(func() (interface{}, error) {
+		err := h.loadData(h.PathSpec.BaseFs.Data.Fs)
+		// Return the error in the error slot (not the value slot), so that
+		// callers of h.init.data.Do() actually see load failures.
+		return nil, err
+	})
+
+ h.init.translations.Add(func() (interface{}, error) {
+ if len(h.Sites) > 1 {
+ allTranslations := pagesToTranslationsMap(h.Sites)
+ assignTranslationsToPages(allTranslations, h.Sites)
+ }
+
+ return nil, nil
+ })
+
+ h.init.gitInfo.Add(func() (interface{}, error) {
+ err := h.loadGitInfo()
+ return nil, err
+ })
for _, s := range sites {
- s.owner = h
+ s.h = h
}
if err := applyDeps(cfg, sites...); err != nil {
@@ -197,14 +318,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
h.ContentChanges = contentChangeTracker
}
- if err := h.initGitInfo(); err != nil {
- return nil, err
- }
-
return h, nil
}
-func (h *HugoSites) initGitInfo() error {
+func (h *HugoSites) loadGitInfo() error {
if h.Cfg.GetBool("enableGitInfo") {
gi, err := newGitInfo(h.Cfg)
if err != nil {
@@ -247,16 +364,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
d.Site = &s.Info
- siteConfig, err := loadSiteConfig(s.Language)
+ siteConfig, err := loadSiteConfig(s.language)
if err != nil {
return err
}
- s.siteConfig = siteConfig
- s.siteRefLinker, err = newSiteRefLinker(s.Language, s)
+ s.siteConfigConfig = siteConfig
+ s.siteRefLinker, err = newSiteRefLinker(s.language, s)
return err
}
- cfg.Language = s.Language
+ cfg.Language = s.language
cfg.MediaTypes = s.mediaTypesConfig
cfg.OutputFormats = s.outputFormatsConfig
@@ -347,11 +464,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
return sites, nil
}
-// Reset resets the sites and template caches, making it ready for a full rebuild.
-func (h *HugoSites) reset() {
- for i, s := range h.Sites {
- h.Sites[i] = s.reset()
+// Reset resets the sites and template caches etc., making it ready for a full rebuild.
+func (h *HugoSites) reset(config *BuildCfg) {
+ if config.ResetState {
+ for i, s := range h.Sites {
+ h.Sites[i] = s.reset()
+ if r, ok := s.Fs.Destination.(hugofs.Reseter); ok {
+ r.Reset()
+ }
+ }
}
+
+ h.fatalErrorHandler = &fatalErrorHandler{
+ h: h,
+ donec: make(chan bool),
+ }
+
+ h.init.Reset()
}
// resetLogs resets the log counters etc. Used to do a new build on the same sites.
@@ -387,7 +516,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
h.Sites = sites
for _, s := range sites {
- s.owner = h
+ s.h = h
}
if err := applyDeps(depsCfg, sites...); err != nil {
@@ -435,7 +564,10 @@ type BuildCfg struct {
// Note that a page does not have to have a content page / file.
// For regular builds, this will always return true.
// TODO(bep) rename/work this.
-func (cfg *BuildCfg) shouldRender(p *Page) bool {
+func (cfg *BuildCfg) shouldRender(p *pageState) bool {
+ if !p.render {
+ return false
+ }
if p.forceRender {
p.forceRender = false
return true
@@ -445,15 +577,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool {
return true
}
- if cfg.RecentlyVisited[p.RelPermalink()] {
- if cfg.PartialReRender {
- _ = p.initMainOutputFormat()
- }
- return true
- }
-
- if cfg.whatChanged != nil && p.File != nil {
- return cfg.whatChanged.files[p.File.Filename()]
+ if cfg.whatChanged != nil && p.File() != nil {
+ return cfg.whatChanged.files[p.File().Filename()]
}
return false
@@ -477,100 +602,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
return nil
}
- // TODO(bep) DRY
- sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))
-
s := h.Sites[0]
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
- sitemapDefault.Filename, h.toSiteInfos(), smLayouts...)
-}
-
-func (h *HugoSites) assignMissingTranslations() error {
-
- // This looks heavy, but it should be a small number of nodes by now.
- allPages := h.findAllPagesByKindNotIn(KindPage)
- for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
- nodes := h.findPagesByKindIn(nodeType, allPages)
-
- // Assign translations
- for _, t1 := range nodes {
- for _, t2 := range nodes {
- if t1.isNewTranslation(t2) {
- t1.translations = append(t1.translations, t2)
- }
- }
- }
- }
-
- // Now we can sort the translations.
- for _, p := range allPages {
- if len(p.translations) > 0 {
- pageBy(languagePageSort).Sort(p.translations)
- }
- }
- return nil
-
+ s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
}
// createMissingPages creates home page, taxonomies etc. that isn't created as an
// effect of having a content file.
func (h *HugoSites) createMissingPages() error {
- var newPages Pages
+ var newPages pageStatePages
for _, s := range h.Sites {
- if s.isEnabled(KindHome) {
+ if s.isEnabled(page.KindHome) {
// home pages
- home := s.findPagesByKind(KindHome)
- if len(home) > 1 {
+ homes := s.findWorkPagesByKind(page.KindHome)
+ if len(homes) > 1 {
panic("Too many homes")
}
- if len(home) == 0 {
- n := s.newHomePage()
- s.Pages = append(s.Pages, n)
- newPages = append(newPages, n)
+ var home *pageState
+ if len(homes) == 0 {
+ home = s.newPage(page.KindHome)
+ s.workAllPages = append(s.workAllPages, home)
+ newPages = append(newPages, home)
+ } else {
+ home = homes[0]
}
+
+ s.home = home
}
// Will create content-less root sections.
newSections := s.assembleSections()
- s.Pages = append(s.Pages, newSections...)
+ s.workAllPages = append(s.workAllPages, newSections...)
newPages = append(newPages, newSections...)
+ taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
+ taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
+
// taxonomy list and terms pages
- taxonomies := s.Language.GetStringMapString("taxonomies")
+ taxonomies := s.Language().GetStringMapString("taxonomies")
if len(taxonomies) > 0 {
- taxonomyPages := s.findPagesByKind(KindTaxonomy)
- taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm)
+ taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
+ taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
+
+ // Make them navigable from WeightedPage etc.
+ for _, p := range taxonomyPages {
+ p.getTaxonomyNodeInfo().TransferValues(p)
+ }
+ for _, p := range taxonomyTermsPages {
+ p.getTaxonomyNodeInfo().TransferValues(p)
+ }
+
for _, plural := range taxonomies {
- if s.isEnabled(KindTaxonomyTerm) {
+ if taxonomyTermEnabled {
foundTaxonomyTermsPage := false
for _, p := range taxonomyTermsPages {
- if p.sectionsPath() == plural {
+ if p.SectionsPath() == plural {
foundTaxonomyTermsPage = true
break
}
}
if !foundTaxonomyTermsPage {
- n := s.newTaxonomyTermsPage(plural)
- s.Pages = append(s.Pages, n)
+ n := s.newPage(page.KindTaxonomyTerm, plural)
+ n.getTaxonomyNodeInfo().TransferValues(n)
+ s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n)
}
}
- if s.isEnabled(KindTaxonomy) {
- for key := range s.Taxonomies[plural] {
+ if taxonomyEnabled {
+ for termKey := range s.Taxonomies[plural] {
+
foundTaxonomyPage := false
- origKey := key
- if s.Info.preserveTaxonomyNames {
- key = s.PathSpec.MakePathSanitized(key)
- }
for _, p := range taxonomyPages {
- sectionsPath := p.sectionsPath()
+ sectionsPath := p.SectionsPath()
if !strings.HasPrefix(sectionsPath, plural) {
continue
@@ -579,20 +689,21 @@ func (h *HugoSites) createMissingPages() error {
singularKey := strings.TrimPrefix(sectionsPath, plural)
singularKey = strings.TrimPrefix(singularKey, "/")
- // Some people may have /authors/MaxMustermann etc. as paths.
- // p.sections contains the raw values from the file system.
- // See https://github.com/gohugoio/hugo/issues/4238
- singularKey = s.PathSpec.MakePathSanitized(singularKey)
-
- if singularKey == key {
+ if singularKey == termKey {
foundTaxonomyPage = true
break
}
}
if !foundTaxonomyPage {
- n := s.newTaxonomyPage(plural, origKey)
- s.Pages = append(s.Pages, n)
+ info := s.taxonomyNodes.Get(plural, termKey)
+ if info == nil {
+ panic("no info found")
+ }
+
+ n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
+ info.TransferValues(n)
+ s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n)
}
}
@@ -601,24 +712,6 @@ func (h *HugoSites) createMissingPages() error {
}
}
- if len(newPages) > 0 {
- // This resorting is unfortunate, but it also needs to be sorted
- // when sections are created.
- first := h.Sites[0]
-
- first.AllPages = append(first.AllPages, newPages...)
-
- first.AllPages.sort()
-
- for _, s := range h.Sites {
- s.Pages.sort()
- }
-
- for i := 1; i < len(h.Sites); i++ {
- h.Sites[i].AllPages = first.AllPages
- }
- }
-
return nil
}
@@ -628,61 +721,58 @@ func (h *HugoSites) removePageByFilename(filename string) {
}
}
-func (h *HugoSites) setupTranslations() {
+func (h *HugoSites) createPageCollections() error {
for _, s := range h.Sites {
for _, p := range s.rawAllPages {
- if p.Kind == kindUnknown {
- p.Kind = p.kindFromSections()
- }
-
- if !p.s.isEnabled(p.Kind) {
+ if !s.isEnabled(p.Kind()) {
continue
}
- shouldBuild := p.shouldBuild()
- s.updateBuildStats(p)
+ shouldBuild := s.shouldBuild(p)
+ s.buildStats.update(p)
if shouldBuild {
- if p.headless {
+ if p.m.headless {
s.headlessPages = append(s.headlessPages, p)
} else {
- s.Pages = append(s.Pages, p)
+ s.workAllPages = append(s.workAllPages, p)
}
}
}
}
- allPages := make(Pages, 0)
+ allPages := newLazyPagesFactory(func() page.Pages {
+ var pages page.Pages
+ for _, s := range h.Sites {
+ pages = append(pages, s.Pages()...)
+ }
- for _, s := range h.Sites {
- allPages = append(allPages, s.Pages...)
- }
+ page.SortByDefault(pages)
- allPages.sort()
+ return pages
+ })
- for _, s := range h.Sites {
- s.AllPages = allPages
- }
+ allRegularPages := newLazyPagesFactory(func() page.Pages {
+ return h.findPagesByKindIn(page.KindPage, allPages.get())
+ })
- // Pull over the collections from the master site
- for i := 1; i < len(h.Sites); i++ {
- h.Sites[i].Data = h.Sites[0].Data
+ for _, s := range h.Sites {
+ s.PageCollections.allPages = allPages
+ s.PageCollections.allRegularPages = allRegularPages
}
- if len(h.Sites) > 1 {
- allTranslations := pagesToTranslationsMap(allPages)
- assignTranslationsToPages(allTranslations, allPages)
- }
+ return nil
}
-func (s *Site) preparePagesForRender(start bool) error {
- for _, p := range s.Pages {
- if err := p.prepareForRender(start); err != nil {
+func (s *Site) preparePagesForRender(idx int) error {
+
+ for _, p := range s.workAllPages {
+ if err := p.initOutputFormat(idx); err != nil {
return err
}
}
for _, p := range s.headlessPages {
- if err := p.prepareForRender(start); err != nil {
+ if err := p.initOutputFormat(idx); err != nil {
return err
}
}
@@ -691,62 +781,141 @@ func (s *Site) preparePagesForRender(start bool) error {
}
// Pages returns all pages for all sites.
-func (h *HugoSites) Pages() Pages {
- return h.Sites[0].AllPages
+func (h *HugoSites) Pages() page.Pages {
+ return h.Sites[0].AllPages()
}
-func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) {
- if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 {
- p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName())
- err := p.shortcodeState.executeShortcodesForDelta(p)
+func (h *HugoSites) loadData(fs afero.Fs) (err error) {
+ spec := source.NewSourceSpec(h.PathSpec, fs)
+ fileSystem := spec.NewFilesystem("")
+ h.data = make(map[string]interface{})
+ for _, r := range fileSystem.Files() {
+ if err := h.handleDataFile(r); err != nil {
+ return err
+ }
+ }
- if err != nil {
+ return
+}
+
+func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
+ var current map[string]interface{}
- return rawContentCopy, err
+ f, err := r.Open()
+ if err != nil {
+ return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
+ }
+ defer f.Close()
+
+ // Crawl in data tree to insert data
+ current = h.data
+ keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
+ // The first path element is the virtual folder (typically theme name), which is
+ // not part of the key.
+ if len(keyParts) > 1 {
+ for _, key := range keyParts[1:] {
+ if key != "" {
+ if _, ok := current[key]; !ok {
+ current[key] = make(map[string]interface{})
+ }
+ current = current[key].(map[string]interface{})
+ }
}
+ }
- rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes)
+ data, err := h.readData(r)
+ if err != nil {
+ return h.errWithFileContext(err, r)
+ }
- if err != nil {
- p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error())
+ if data == nil {
+ return nil
+ }
+
+ // filepath.Walk walks the files in lexical order, '/' comes before '.'
+ // this warning could happen if
+ // 1. A theme uses the same key; the main data folder wins
+ // 2. A sub folder uses the same key: the sub folder wins
+ higherPrecedentData := current[r.BaseFileName()]
+
+ switch data.(type) {
+ case nil:
+ // hear the crickets?
+
+ case map[string]interface{}:
+
+ switch higherPrecedentData.(type) {
+ case nil:
+ current[r.BaseFileName()] = data
+ case map[string]interface{}:
+ // merge maps: insert entries from data for keys that
+ // don't already exist in higherPrecedentData
+ higherPrecedentMap := higherPrecedentData.(map[string]interface{})
+ for key, value := range data.(map[string]interface{}) {
+ if _, exists := higherPrecedentMap[key]; exists {
+ h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
+ } else {
+ higherPrecedentMap[key] = value
+ }
+ }
+ default:
+ // can't merge: higherPrecedentData is not a map
+ h.Log.WARN.Printf("The %T data from '%s' overridden by "+
+ "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
+ }
+
+ case []interface{}:
+ if higherPrecedentData == nil {
+ current[r.BaseFileName()] = data
+ } else {
+ // we don't merge array data
+ h.Log.WARN.Printf("The %T data from '%s' overridden by "+
+ "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
}
+
+ default:
+ h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName())
}
- return rawContentCopy, nil
+ return nil
}
-func (s *Site) updateBuildStats(page *Page) {
- if page.IsDraft() {
- s.draftCount++
+func (h *HugoSites) errWithFileContext(err error, f source.File) error {
+ rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
+ if !ok {
+ return err
}
- if page.IsFuture() {
- s.futureCount++
- }
+ realFilename := rfi.RealFilename()
- if page.IsExpired() {
- s.expiredCount++
- }
-}
+ err, _ = herrors.WithFileContextForFile(
+ err,
+ realFilename,
+ realFilename,
+ h.SourceSpec.Fs.Source,
+ herrors.SimpleLineMatcher)
-func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages {
- return h.Sites[0].findPagesByKindNotIn(kind, inPages)
+ return err
}
-func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages {
- return h.Sites[0].findPagesByKindIn(kind, inPages)
-}
+func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) {
+ file, err := f.Open()
+ if err != nil {
+ return nil, errors.Wrap(err, "readData: failed to open data file")
+ }
+ defer file.Close()
+ content := helpers.ReaderToBytes(file)
-func (h *HugoSites) findAllPagesByKind(kind string) Pages {
- return h.findPagesByKindIn(kind, h.Sites[0].AllPages)
+ format := metadecoders.FormatFromString(f.Extension())
+ return metadecoders.Default.Unmarshal(content, format)
}
-func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
- return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
+func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
+ return h.Sites[0].findPagesByKindIn(kind, inPages)
}
-func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
- var pages Pages
+func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages {
+ var pages page.Pages
for _, s := range h.Sites {
pages = append(pages, s.findPagesByShortcode(shortcode)...)
}