-rw-r--r--  hugolib/hugo_sites.go         51
-rw-r--r--  hugolib/hugo_sites_test.go   117
-rw-r--r--  hugolib/menu_test.go          16
-rw-r--r--  hugolib/node.go               26
-rw-r--r--  hugolib/pagination_test.go    22
-rw-r--r--  hugolib/site.go              257
6 files changed, 369 insertions(+), 120 deletions(-)
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index 209ee4250..8e97e0783 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -39,6 +39,10 @@ type HugoSites struct {
runMode runmode
multilingual *Multilingual
+
+ // Maps internalID to a set of nodes.
+ nodeMap map[string]Nodes
+ nodeMapMu sync.Mutex
}
// NewHugoSites creates a new collection of sites given the input sites, building
@@ -50,7 +54,7 @@ func newHugoSites(sites ...*Site) (*HugoSites, error) {
return nil, err
}
- h := &HugoSites{multilingual: langConfig, Sites: sites}
+ h := &HugoSites{multilingual: langConfig, Sites: sites, nodeMap: make(map[string]Nodes)}
for _, s := range sites {
s.owner = h
@@ -92,14 +96,39 @@ func createSitesFromConfig() ([]*Site, error) {
return sites, nil
}
+func (h *HugoSites) addNode(nodeID string, node *Node) {
+ h.nodeMapMu.Lock()
+
+ if nodes, ok := h.nodeMap[nodeID]; ok {
+ h.nodeMap[nodeID] = append(nodes, node)
+ } else {
+ h.nodeMap[nodeID] = Nodes{node}
+ }
+ h.nodeMapMu.Unlock()
+}
+
+func (h *HugoSites) getNodes(nodeID string) Nodes {
+ // At this point it is read only, so no need to lock.
+ if nodeID != "" {
+ if nodes, ok := h.nodeMap[nodeID]; ok {
+ return nodes
+ }
+ }
+ // Paginator pages will not have related nodes.
+ return Nodes{}
+}
+
// Reset resets the sites, making it ready for a full rebuild.
func (h *HugoSites) reset() {
+ h.nodeMap = make(map[string]Nodes)
for i, s := range h.Sites {
h.Sites[i] = s.reset()
}
}
func (h *HugoSites) reCreateFromConfig() error {
+ h.nodeMap = make(map[string]Nodes)
+
sites, err := createSitesFromConfig()
if err != nil {
@@ -236,6 +265,7 @@ func (h *HugoSites) Rebuild(config BuildCfg, events ...fsnotify.Event) error {
firstSite := h.Sites[0]
+ h.nodeMap = make(map[string]Nodes)
for _, s := range h.Sites {
s.resetBuildState()
}
@@ -359,6 +389,23 @@ func (h *HugoSites) setupTranslations(master *Site) {
// Shortcode handling is the main task in here.
// TODO(bep) We need to look at the whole handler-chain construct with the below in mind.
func (h *HugoSites) preRender() error {
+
+ for _, s := range h.Sites {
+ // Run "render prepare"
+ if err := s.renderHomePage(true); err != nil {
+ return err
+ }
+ if err := s.renderTaxonomiesLists(true); err != nil {
+ return err
+ }
+ if err := s.renderListsOfTaxonomyTerms(true); err != nil {
+ return err
+ }
+ if err := s.renderSectionLists(true); err != nil {
+ return err
+ }
+ }
+
pageChan := make(chan *Page)
wg := &sync.WaitGroup{}
@@ -418,7 +465,7 @@ func (h *HugoSites) preRender() error {
}
// Pages returns all pages for all sites.
-func (h HugoSites) Pages() Pages {
+func (h *HugoSites) Pages() Pages {
return h.Sites[0].AllPages
}
diff --git a/hugolib/hugo_sites_test.go b/hugolib/hugo_sites_test.go
index 6ab60d9ec..26a294d5e 100644
--- a/hugolib/hugo_sites_test.go
+++ b/hugolib/hugo_sites_test.go
@@ -59,7 +59,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
t.Fatalf("Failed to build sites: %s", err)
}
- require.Len(t, sites.Sites, 2)
+ require.Len(t, sites.Sites, 4)
enSite := sites.Sites[0]
frSite := sites.Sites[1]
@@ -177,8 +177,8 @@ func TestMultiSitesBuild(t *testing.T) {
if len(enSite.Pages) != 3 {
t.Fatal("Expected 3 english pages")
}
- assert.Len(t, enSite.Source.Files(), 11, "should have 11 source files")
- assert.Len(t, enSite.AllPages, 6, "should have 6 total pages (including translations)")
+ assert.Len(t, enSite.Source.Files(), 13, "should have 13 source files")
+ assert.Len(t, enSite.AllPages, 8, "should have 8 total pages (including translations)")
doc1en := enSite.Pages[0]
permalink, err := doc1en.Permalink()
@@ -230,7 +230,7 @@ func TestMultiSitesBuild(t *testing.T) {
assert.Equal(t, "fr", frSite.Language.Lang)
assert.Len(t, frSite.Pages, 3, "should have 3 pages")
- assert.Len(t, frSite.AllPages, 6, "should have 6 total pages (including translations)")
+ assert.Len(t, frSite.AllPages, 8, "should have 8 total pages (including translations)")
for _, frenchPage := range frSite.Pages {
assert.Equal(t, "fr", frenchPage.Lang())
@@ -240,6 +240,36 @@ func TestMultiSitesBuild(t *testing.T) {
languageRedirect := readDestination(t, "public/index.html")
require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect)
+ // Check node translations
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 3)
+ require.Equal(t, "fr", homeEn.Translations()[0].Lang())
+ require.Equal(t, "nn", homeEn.Translations()[1].Lang())
+ require.Equal(t, "Nynorsk", homeEn.Translations()[1].Title)
+ require.Equal(t, "nb", homeEn.Translations()[2].Lang())
+ require.Equal(t, "Bokmål", homeEn.Translations()[2].Title)
+
+ sectFr := frSite.getNode("sect-sect-0")
+ require.NotNil(t, sectFr)
+
+ require.Equal(t, "fr", sectFr.Lang())
+ require.Len(t, sectFr.Translations(), 1)
+ require.Equal(t, "en", sectFr.Translations()[0].Lang())
+ require.Equal(t, "Sects", sectFr.Translations()[0].Title)
+
+ nnSite := sites.Sites[2]
+ require.Equal(t, "nn", nnSite.Language.Lang)
+ taxNn := nnSite.getNode("taxlist-lag-0")
+ require.NotNil(t, taxNn)
+ require.Len(t, taxNn.Translations(), 1)
+ require.Equal(t, "nb", taxNn.Translations()[0].Lang())
+
+ taxTermNn := nnSite.getNode("tax-lag-sogndal-0")
+ require.NotNil(t, taxTermNn)
+ require.Len(t, taxTermNn.Translations(), 1)
+ require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
+
// Check sitemap(s)
sitemapIndex := readDestination(t, "public/sitemap.xml")
require.True(t, strings.Contains(sitemapIndex, "<loc>http:/example.com/blog/en/sitemap.xml</loc>"), sitemapIndex)
@@ -338,7 +368,7 @@ func TestMultiSitesRebuild(t *testing.T) {
},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
assert.Equal(t, "new_fr_1", frSite.Pages[3].Title)
assert.Equal(t, "new_en_2", enSite.Pages[0].Title)
@@ -391,7 +421,7 @@ func TestMultiSitesRebuild(t *testing.T) {
[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(doc1, "Template Changed"), doc1)
@@ -408,12 +438,18 @@ func TestMultiSitesRebuild(t *testing.T) {
[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, "public/fr/sect/doc1/index.html")
assert.True(t, strings.Contains(docFr, "Salut"), "No Salut")
+
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 3)
+ require.Equal(t, "fr", homeEn.Translations()[0].Lang())
+
},
},
} {
@@ -469,12 +505,12 @@ func TestAddNewLanguage(t *testing.T) {
newConfig := multiSiteTomlConfig + `
-[Languages.no]
+[Languages.sv]
weight = 15
-title = "Norsk"
+title = "Svenska"
`
- writeNewContentFile(t, "Norwegian Contentfile", "2016-01-01", "content/sect/doc1.no.md", 10)
+ writeNewContentFile(t, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
// replace the config
writeSource(t, "multilangconfig.toml", newConfig)
@@ -486,27 +522,32 @@ title = "Norsk"
t.Fatalf("Failed to rebuild sites: %s", err)
}
- require.Len(t, sites.Sites, 3, fmt.Sprintf("Len %d", len(sites.Sites)))
+ require.Len(t, sites.Sites, 5, fmt.Sprintf("Len %d", len(sites.Sites)))
- // The Norwegian site should be put in the middle (language weight=15)
+ // The Swedish site should be put in the middle (language weight=15)
enSite := sites.Sites[0]
- noSite := sites.Sites[1]
+ svSite := sites.Sites[1]
frSite := sites.Sites[2]
require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang)
- require.True(t, noSite.Language.Lang == "no", noSite.Language.Lang)
+ require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 4)
+ require.Equal(t, "sv", homeEn.Translations()[0].Lang())
+
require.Len(t, enSite.Pages, 3)
require.Len(t, frSite.Pages, 3)
- // Veriy Norwegian site
- require.Len(t, noSite.Pages, 1)
- noPage := noSite.Pages[0]
- require.Equal(t, "Norwegian Contentfile", noPage.Title)
- require.Equal(t, "no", noPage.Lang())
- require.Len(t, noPage.Translations(), 2)
- require.Len(t, noPage.AllTranslations(), 3)
- require.Equal(t, "en", noPage.Translations()[0].Lang())
+ // Verify Swedish site
+ require.Len(t, svSite.Pages, 1)
+ svPage := svSite.Pages[0]
+ require.Equal(t, "Swedish Contentfile", svPage.Title)
+ require.Equal(t, "sv", svPage.Lang())
+ require.Len(t, svPage.Translations(), 2)
+ require.Len(t, svPage.AllTranslations(), 3)
+ require.Equal(t, "en", svPage.Translations()[0].Lang())
//noFile := readDestination(t, "/public/no/doc1/index.html")
//require.True(t, strings.Contains("foo", noFile), noFile)
@@ -543,6 +584,18 @@ weight = 20
title = "Français"
[Languages.fr.Taxonomies]
plaque = "plaques"
+
+[Languages.nn]
+weight = 30
+title = "Nynorsk"
+[Languages.nn.Taxonomies]
+lag = "lag"
+
+[Languages.nb]
+weight = 40
+title = "Bokmål"
+[Languages.nb.Taxonomies]
+lag = "lag"
`
func createMultiTestSites(t *testing.T, tomlConfig string) *HugoSites {
@@ -687,6 +740,24 @@ draft: true
---
# Draft
`)},
+ {filepath.FromSlash("stats/tax.nn.md"), []byte(`---
+title: Tax NN
+publishdate: "2000-01-06"
+weight: 1001
+lag:
+- Sogndal
+---
+# Tax NN
+`)},
+ {filepath.FromSlash("stats/tax.nb.md"), []byte(`---
+title: Tax NB
+publishdate: "2000-01-06"
+weight: 1002
+lag:
+- Sogndal
+---
+# Tax NB
+`)},
}
writeSource(t, "multilangconfig.toml", tomlConfig)
@@ -713,7 +784,7 @@ draft: true
t.Fatalf("Failed to create sites: %s", err)
}
- if len(sites.Sites) != 2 {
+ if len(sites.Sites) != 4 {
t.Fatalf("Got %d sites", len(sites.Sites))
}
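
The assertions above look nodes up by IDs such as "home-0", "sect-sect-0", "taxlist-lag-0" and "tax-lag-sogndal-0", i.e. "kind[-key...]-pager counter". The real code builds these inline with fmt.Sprintf; a tiny illustrative helper (not part of the diff) that produces the same shape:

```go
package main

import (
	"fmt"
	"strings"
)

// nodeID joins a kind prefix, optional key parts and a pager counter into the
// "<kind>-<key>-<counter>" shape the tests assert on.
func nodeID(kind string, counter int, keys ...string) string {
	parts := append([]string{kind}, keys...)
	return fmt.Sprintf("%s-%d", strings.Join(parts, "-"), counter)
}

func main() {
	fmt.Println(nodeID("home", 0))                  // home-0
	fmt.Println(nodeID("sect", 0, "sect"))          // sect-sect-0
	fmt.Println(nodeID("taxlist", 0, "lag"))        // taxlist-lag-0
	fmt.Println(nodeID("tax", 0, "lag", "sogndal")) // tax-lag-sogndal-0
}
```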
diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go
index 4eafba47e..5ca8af8e5 100644
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -27,6 +27,7 @@ import (
"github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
const (
@@ -414,9 +415,13 @@ func doTestSectionPagesMenu(canonifyUrls bool, t *testing.T) {
fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages))
- nodeFirst := s.newSectionListNode("First", "first", firstSectionPages)
- nodeSecond := s.newSectionListNode("Second Section", "second-section", secondSectionPages)
- nodeFishy := s.newSectionListNode("Fish and Chips", "fish-and-chips", fishySectionPages)
+ nodeFirst := s.getNode("sect-first-0")
+ require.NotNil(t, nodeFirst)
+ nodeSecond := s.getNode("sect-second-section-0")
+ require.NotNil(t, nodeSecond)
+ nodeFishy := s.getNode("sect-Fish and Chips-0")
+ require.NotNil(t, nodeFishy)
+
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
@@ -472,7 +477,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
&MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
} {
- n, _ := s.newTaxonomyNode(this.taxInfo)
+ n, _ := s.newTaxonomyNode(true, this.taxInfo, i)
isMenuCurrent := n.IsMenuCurrent(this.menu, this.menuItem)
hasMenuCurrent := n.HasMenuCurrent(this.menu, this.menuItem)
@@ -544,7 +549,8 @@ func TestHomeNodeMenu(t *testing.T) {
s := setupMenuTests(t, menuPageSources)
- home := s.newHomeNode()
+ home := s.getNode("home-0")
+
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
for i, this := range []struct {
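
The menu tests now fetch prepared nodes via s.getNode(...) and assert they exist, instead of constructing fresh ones. getNode itself is not shown in this excerpt; assuming it is a read-side accessor over the nodeCache added in site.go further down, a self-contained sketch:

```go
package main

import (
	"fmt"
	"sync"
)

type Node struct{ Title string }

// nodeCache mirrors the struct added to site.go: a map guarded by an
// embedded RWMutex, reset before every full rebuild.
type nodeCache struct {
	m map[string]*Node
	sync.RWMutex
}

// reset runs before the build fans out, which is why the version in the diff
// does not take the lock; it is taken here only to keep the sketch safe.
func (c *nodeCache) reset() {
	c.Lock()
	c.m = make(map[string]*Node)
	c.Unlock()
}

// get is the assumed lookup behind s.getNode; nil means the node was never
// created during the prepare pass (hence the require.NotNil checks above).
func (c *nodeCache) get(id string) *Node {
	c.RLock()
	defer c.RUnlock()
	return c.m[id]
}

func main() {
	c := &nodeCache{m: make(map[string]*Node)}
	c.m["sect-first-0"] = &Node{Title: "Firsts"}
	fmt.Println(c.get("sect-first-0").Title)    // Firsts
	fmt.Println(c.get("sect-missing-0") == nil) // true
	c.reset()
	fmt.Println(c.get("sect-first-0") == nil) // true after a reset
}
```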
diff --git a/hugolib/node.go b/hugolib/node.go
index 780190ec6..fed759eeb 100644
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -17,7 +17,7 @@ import (
"html/template"
"path"
"path/filepath"
- "sort"
+ //"sort"
"strings"
"sync"
"time"
@@ -30,6 +30,11 @@ import (
)
type Node struct {
+ // a natural key that should be unique for this site
+ // for the home page this will typically be "home", but it can be anything
+ // as long as it is the same for repeated builds.
+ nodeID string
+
RSSLink template.HTML
Site *SiteInfo `json:"-"`
// layout string
@@ -283,23 +288,8 @@ func (n *Node) IsTranslated() bool {
func (n *Node) initTranslations() {
n.translationsInit.Do(func() {
- if n.translations != nil {
- return
- }
- n.translations = make(Nodes, 0)
- for _, l := range n.Site.Languages {
- if l == n.language {
- n.translations = append(n.translations, n)
- continue
- }
-
- translation := *n
- translation.language = l
- translation.translations = n.translations
- n.translations = append(n.translations, &translation)
- }
-
- sort.Sort(n.translations)
+ n.translations = n.Site.owner.getNodes(n.nodeID)
+ //sort.Sort(n.translations)
})
}
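
The rewritten initTranslations above no longer clones the node once per language; it simply asks the owning HugoSites for every node registered under the same nodeID (the sort is disabled for now). A stripped-down sketch of that lazy, once-only lookup; the exclusion of the node's own language is inferred from the test expectations and is not part of this hunk:

```go
package main

import (
	"fmt"
	"sync"
)

type node struct {
	nodeID string
	lang   string

	owner            *registry // stands in for n.Site.owner
	translations     []*node
	translationsInit sync.Once
}

type registry struct{ m map[string][]*node }

func (r *registry) getNodes(id string) []*node { return r.m[id] }

// Translations resolves the related nodes exactly once and caches the result.
// Skipping the node itself matches the Translations() lengths asserted in the
// tests (an assumption; that filter lives outside this diff).
func (n *node) Translations() []*node {
	n.translationsInit.Do(func() {
		for _, t := range n.owner.getNodes(n.nodeID) {
			if t != n {
				n.translations = append(n.translations, t)
			}
		}
	})
	return n.translations
}

func main() {
	r := &registry{m: map[string][]*node{}}
	en := &node{nodeID: "home-0", lang: "en", owner: r}
	fr := &node{nodeID: "home-0", lang: "fr", owner: r}
	r.m["home-0"] = []*node{en, fr}
	for _, t := range en.Translations() {
		fmt.Println(t.lang) // fr
	}
}
```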
diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go
index df2094d63..6f85e91d3 100644
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -224,8 +224,8 @@ func doTestPaginator(t *testing.T, useViper bool) {
}
pages := createTestPages(12)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
n1.Data["Pages"] = pages
var paginator1 *Pager
@@ -261,7 +261,7 @@ func TestPaginatorWithNegativePaginate(t *testing.T) {
viper.Set("paginate", -1)
s := newSiteDefaultLang()
- _, err := s.newHomeNode().Paginator()
+ _, err := s.newHomeNode(true, 0).Paginator()
assert.NotNil(t, err)
}
@@ -283,8 +283,8 @@ func doTestPaginate(t *testing.T, useViper bool) {
pages := createTestPages(6)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
var paginator1, paginator2 *Pager
var err error
@@ -316,7 +316,7 @@ func doTestPaginate(t *testing.T, useViper bool) {
func TestInvalidOptions(t *testing.T) {
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
_, err := n1.Paginate(createTestPages(1), 1, 2)
assert.NotNil(t, err)
_, err = n1.Paginator(1, 2)
@@ -330,7 +330,7 @@ func TestPaginateWithNegativePaginate(t *testing.T) {
viper.Set("paginate", -1)
s := newSiteDefaultLang()
- _, err := s.newHomeNode().Paginate(createTestPages(2))
+ _, err := s.newHomeNode(true, 0).Paginate(createTestPages(2))
assert.NotNil(t, err)
}
@@ -352,8 +352,8 @@ func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) {
viper.Set("paginate", 10)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
_, err := n1.Paginator()
assert.Nil(t, err)
@@ -370,8 +370,8 @@ func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) {
viper.Set("paginate", 10)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
p1 := createTestPages(2)
p2 := createTestPages(10)
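
The paginator tests now call newHomeNode with a prepare flag and a counter, so each pager page gets its own node identity ("home-0", "home-1", ...). The underlying nodeLookup is not included in this excerpt; based on how it is called in site.go below, its shape is presumably along these lines (standalone sketch, simplified types):

```go
package main

import "fmt"

type Node struct{ nodeID string }

type Site struct {
	cache map[string]*Node // stands in for the real nodeCache
}

// nodeLookup appends the pager counter to the id; in the prepare pass it
// creates and caches the node (the real code would also register it with the
// owning HugoSites via addNode), in the render pass it returns the prepared one.
func (s *Site) nodeLookup(id string, counter int, prepare bool) *Node {
	key := fmt.Sprintf("%s-%d", id, counter)
	if !prepare {
		return s.cache[key]
	}
	n := &Node{nodeID: key}
	s.cache[key] = n
	return n
}

func main() {
	s := &Site{cache: map[string]*Node{}}
	fmt.Println(s.nodeLookup("home", 0, true).nodeID)  // home-0 (created)
	fmt.Println(s.nodeLookup("home", 1, true).nodeID)  // home-1 (second pager node)
	fmt.Println(s.nodeLookup("home", 0, false).nodeID) // home-0 (reused at render time)
}
```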
diff --git a/hugolib/site.go b/hugolib/site.go
index edcccfe28..8403a6af0 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -58,6 +58,15 @@ var (
distinctFeedbackLogger = helpers.NewDistinctFeedbackLogger()
)
+type nodeCache struct {
+ m map[string]*Node
+ sync.RWMutex
+}
+
+func (c *nodeCache) reset() {
+ c.m = make(map[string]*Node)
+}
+
// Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows:
//
@@ -76,7 +85,12 @@ var (
//
// 5. The entire collection of files is written to disk.
type Site struct {
- owner *HugoSites
+ owner *HugoSites
+
+ // Used internally to discover duplicates.
+ nodeCache *nodeCache
+ nodeCacheInit sync.Once
+
Pages Pages
AllPages Pages
rawAllPages Pages
@@ -167,6 +181,7 @@ type SiteInfo struct {
paginationPageCount uint64
Data *map[string]interface{}
+ owner *HugoSites
multilingual *Multilingual
Language *helpers.Language
LanguagePrefix string
@@ -773,13 +788,15 @@ func (s *Site) render() (err error) {
return
}
s.timerStep("render and write aliases")
- if err = s.renderTaxonomiesLists(); err != nil {
+ if err = s.renderTaxonomiesLists(false); err != nil {
return
}
s.timerStep("render and write taxonomies")
- s.renderListsOfTaxonomyTerms()
+ if err = s.renderListsOfTaxonomyTerms(false); err != nil {
+ return
+ }
s.timerStep("render & write taxonomy lists")
- if err = s.renderSectionLists(); err != nil {
+ if err = s.renderSectionLists(false); err != nil {
return
}
s.timerStep("render and write lists")
@@ -787,7 +804,7 @@ func (s *Site) render() (err error) {
return
}
s.timerStep("render and write pages")
- if err = s.renderHomePage(); err != nil {
+ if err = s.renderHomePage(false); err != nil {
return
}
s.timerStep("render and write homepage")
@@ -904,6 +921,7 @@ func (s *Site) initializeSiteInfo() {
Params: params,
Permalinks: permalinks,
Data: &s.Data,
+ owner: s.owner,
}
}
@@ -1372,6 +1390,8 @@ func (s *Site) assembleTaxonomies() {
// Prepare site for a new full build.
func (s *Site) resetBuildState() {
+ s.nodeCache.reset()
+
s.Pages = make(Pages, 0)
s.AllPages = make(Pages, 0)
@@ -1557,7 +1577,7 @@ type taxRenderInfo struct {
// renderTaxonomiesLists renders the listing pages based on the meta data
// each unique term within a taxonomy will have a page created
-func (s *Site) renderTaxonomiesLists() error {
+func (s *Site) renderTaxonomiesLists(prepare bool) error {
wg := &sync.WaitGroup{}
taxes := make(chan taxRenderInfo)
@@ -1567,7 +1587,7 @@ func (s *Site) renderTaxonomiesLists() error {
for i := 0; i < procs*4; i++ {
wg.Add(1)
- go taxonomyRenderer(s, taxes, results, wg)
+ go taxonomyRenderer(prepare, s, taxes, results, wg)
}
errs := make(chan error)
@@ -1593,9 +1613,19 @@ func (s *Site) renderTaxonomiesLists() error {
return nil
}
-func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
+func (s *Site) newTaxonomyNode(prepare bool, t taxRenderInfo, counter int) (*Node, string) {
key := t.key
- n := s.newNode()
+ n := s.nodeLookup(fmt.Sprintf("tax-%s-%s", t.plural, key), counter, prepare)
+
+ if s.Info.preserveTaxonomyNames {
+ key = helpers.MakePathSanitized(key)
+ }
+ base := t.plural + "/" + key
+
+ if !prepare {
+ return n, base
+ }
+
if s.Info.preserveTaxonomyNames {
key = helpers.MakePathSanitized(key)
// keep as is in the title
@@ -1603,7 +1633,6 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
} else {
n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
}
- base := t.plural + "/" + key
s.setURLs(n, base)
if len(t.pages) > 0 {
n.Date = t.pages[0].Page.Date
@@ -1616,19 +1645,30 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
return n, base
}
-func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
+func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done()
var n *Node
for t := range taxes {
- var base string
- layouts := s.appendThemeTemplates(
- []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
+ var (
+ base string
+ baseWithLanguagePrefix string
+ paginatePath string
+ layouts []string
+ )
- n, base = s.newTaxonomyNode(t)
- baseWithLanguagePrefix := n.addLangPathPrefix(base)
+ n, base = s.newTaxonomyNode(prepare, t, 0)
+
+ if prepare {
+ continue
+ }
+
+ baseWithLanguagePrefix = n.addLangPathPrefix(base)
+
+ layouts = s.appendThemeTemplates(
+ []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
dest := base
if viper.GetBool("UglyURLs") {
@@ -1644,7 +1684,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
if n.paginator != nil {
- paginatePath := viper.GetString("paginatePath")
+ paginatePath = viper.GetString("paginatePath")
// write alias for page 1
s.writeDestAlias(helpers.PaginateAliasPath(baseWithLanguagePrefix, 1), n.Permalink())
@@ -1657,13 +1697,15 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
continue
}
- taxonomyPagerNode, _ := s.newTaxonomyNode(t)
+ taxonomyPagerNode, _ := s.newTaxonomyNode(true, t, i)
+
taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
taxonomyPagerNode.Date = first.Date
taxonomyPagerNode.Lastmod = first.Lastmod
}
+
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", baseWithLanguagePrefix, paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("taxonomy %s", t.singular), htmlBase, taxonomyPagerNode, layouts...); err != nil {
@@ -1673,13 +1715,21 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
}
}
+ if prepare {
+ continue
+ }
+
if !viper.GetBool("DisableRSS") {
// XML Feed
+ c := *n
+ rssNode := &c
+ rssNode.nodeID = ""
rssuri := viper.GetString("RSSUri")
- s.setURLs(n, base+"/"+rssuri)
+ s.setURLs(rssNode, base+"/"+rssuri)
+
rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
- if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, n, s.appendThemeTemplates(rssLayouts)...); err != nil {
+ if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
results <- err
continue
}
@@ -1688,18 +1738,24 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
}
// renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
-func (s *Site) renderListsOfTaxonomyTerms() (err error) {
+func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
taxonomies := s.Language.GetStringMapString("Taxonomies")
for singular, plural := range taxonomies {
- n := s.newNode()
- n.Title = strings.Title(plural)
- s.setURLs(n, plural)
- n.Data["Singular"] = singular
- n.Data["Plural"] = plural
- n.Data["Terms"] = s.Taxonomies[plural]
- // keep the following just for legacy reasons
- n.Data["OrderedIndex"] = n.Data["Terms"]
- n.Data["Index"] = n.Data["Terms"]
+ n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
+
+ if prepare {
+ n.Title = strings.Title(plural)
+ s.setURLs(n, plural)
+ n.Data["Singular"] = singular
+ n.Data["Plural"] = plural
+ n.Data["Terms"] = s.Taxonomies[plural]
+ // keep the following just for legacy reasons
+ n.Data["OrderedIndex"] = n.Data["Terms"]
+ n.Data["Index"] = n.Data["Terms"]
+
+ continue
+ }
+
layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
layouts = s.appendThemeTemplates(layouts)
if s.layoutExists(layouts...) {
@@ -1712,8 +1768,13 @@ func (s *Site) renderListsOfTaxonomyTerms() (err error) {
return
}
-func (s *Site) newSectionListNode(sectionName, section string, data WeightedPages) *Node {
- n := s.newNode()
+func (s *Site) newSectionListNode(prepare bool, sectionName, section string, data WeightedPages, counter int) *Node {
+ n := s.nodeLookup(fmt.Sprintf("sect-%s", sectionName), counter, prepare)
+
+ if !prepare {
+ return n
+ }
+
sectionName = helpers.FirstUpper(sectionName)
if viper.GetBool("PluralizeListTitles") {
n.Title = inflect.Pluralize(sectionName)
@@ -1729,7 +1790,7 @@ func (s *Site) newSectionListNode(sectionName, section string, data WeightedPage
}
// renderSectionLists renders a page for each section
-func (s *Site) renderSectionLists() error {
+func (s *Site) renderSectionLists(prepare bool) error {
for section, data := range s.Sections {
// section keys can be lower case (depending on site.pathifyTaxonomyKeys)
// extract the original casing from the first page to get sensible titles.
@@ -1737,6 +1798,13 @@ func (s *Site) renderSectionLists() error {
if !s.Info.preserveTaxonomyNames && len(data) > 0 {
sectionName = data[0].Page.Section()
}
+
+ n := s.newSectionListNode(prepare, sectionName, section, data, 0)
+
+ if prepare {
+ continue
+ }
+
layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
@@ -1744,7 +1812,6 @@ func (s *Site) renderSectionLists() error {
section = helpers.MakePathSanitized(section)
}
- n := s.newSectionListNode(sectionName, section, data)
base := n.addLangPathPrefix(section)
if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), base, n, s.appendThemeTemplates(layouts)...); err != nil {
@@ -1766,7 +1833,7 @@ func (s *Site) renderSectionLists() error {
continue
}
- sectionPagerNode := s.newSectionListNode(sectionName, section, data)
+ sectionPagerNode := s.newSectionListNode(true, sectionName, section, data, i)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
@@ -1781,6 +1848,10 @@ func (s *Site) renderSectionLists() error {
}
}
+ if prepare {
+ return nil
+ }
+
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
rssuri := viper.GetString("RSSUri")
@@ -1794,9 +1865,12 @@ func (s *Site) renderSectionLists() error {
return nil
}
-func (s *Site) renderHomePage() error {
+func (s *Site) renderHomePage(prepare bool) error {
- n := s.newHomeNode()
+ n := s.newHomeNode(prepare, 0)
+ if prepare {
+ return nil
+ }
layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html"})
base := n.addLangFilepathPrefix("")
@@ -1807,9 +1881,11 @@ func (s *Site) renderHomePage() error {
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
- // write alias for page 1
- // TODO(bep) ml all of these n.addLang ... fix.
- s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
+ {
+ // write alias for page 1
+ // TODO(bep) ml all of these n.addLang ... fix.
+ s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
+ }
pagers := n.paginator.Pagers()
@@ -1819,39 +1895,44 @@ func (s *Site) renderHomePage() error {
continue
}
- homePagerNode := s.newHomeNode()
+ homePagerNode := s.newHomeNode(true, i)
+
homePagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
homePagerNode.Date = first.Date
homePagerNode.Lastmod = first.Lastmod
}
+
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
htmlBase = n.addLangPathPrefix(htmlBase)
- if err := s.renderAndWritePage(fmt.Sprintf("homepage"), filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
+ if err := s.renderAndWritePage(fmt.Sprintf("homepage"),
+ filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
return err
}
+
}
}
if !viper.GetBool("DisableRSS") {