-rw-r--r--  hugolib/hugo_sites.go         13
-rw-r--r--  hugolib/node_as_page_test.go  25
-rw-r--r--  hugolib/page.go               29
-rw-r--r--  hugolib/site.go                1
-rw-r--r--  hugolib/site_render.go        26
5 files changed, 84 insertions, 10 deletions
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index 8332640df..23694319a 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -17,6 +17,7 @@ import (
"errors"
"fmt"
"os"
+ "path"
"strings"
"sync"
"time"
@@ -390,6 +391,11 @@ func (h *HugoSites) createMissingNodes() error {
// TODO(bep) np check node title etc.
s := h.Sites[0]
+ // TODO(bep) np
+ for _, p := range s.Pages {
+ p.setNodeTypeVars(s)
+ }
+
home := s.findPagesByNodeType(NodeHome)
// home page
@@ -460,7 +466,7 @@ func (s *Site) newNodePage(typ NodeType) *Page {
language: s.Language,
}
- return &Page{Node: n}
+ return &Page{Node: n, site: s}
}
func (s *Site) newHomePage() *Page {
@@ -489,6 +495,7 @@ func (s *Site) newTaxonomyPage(plural, key string) *Page {
}
// TODO(bep) np check set url
+ p.URLPath.URL = path.Join(plural, key)
return p
}
@@ -509,7 +516,7 @@ func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
} else {
p.Title = sectionName
}
-
+ p.URLPath.URL = name
return p
}
@@ -613,8 +620,6 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
continue
}
- p.setNodeTypeVars(s)
-
// If we got this far it means that this is either a new Page pointer
// or a template or similar has changed so we need to do a rerendering
// of the shortcodes etc.
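
The section and taxonomy node pages created above now carry an explicit URLPath.URL, built with path.Join for taxonomies. A minimal sketch of that composition; the "categories"/"hugo"/"tags" values are taken from or invented alongside the tests, not hard-coded in the patch:

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// Taxonomy node: URLPath.URL = path.Join(plural, key)
	plural, key := "categories", "hugo"
	fmt.Println(path.Join(plural, key)) // categories/hugo

	// path.Join also cleans its result, so empty elements and doubled
	// slashes collapse safely.
	fmt.Println(path.Join("tags", ""))     // tags
	fmt.Println(path.Join("tags/", "/go")) // tags/go
}
```

Section nodes simply take their section name as the URL path; this value becomes the base that the RSS filename is appended to in site_render.go below.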
diff --git a/hugolib/node_as_page_test.go b/hugolib/node_as_page_test.go
index 9c8506503..8276f2d23 100644
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -31,8 +31,8 @@ import (
*/
func TestNodesAsPage(t *testing.T) {
- //jww.SetStdoutThreshold(jww.LevelDebug)
- jww.SetStdoutThreshold(jww.LevelFatal)
+ jww.SetStdoutThreshold(jww.LevelDebug)
+ //jww.SetStdoutThreshold(jww.LevelFatal)
nodePageFeatureFlag = true
defer toggleNodePageFeatureFlag()
@@ -105,6 +105,8 @@ Content Page %02d
}
viper.Set("paginate", 1)
+ viper.Set("title", "Hugo Rocks")
+ viper.Set("rssURI", "customrss.xml")
s := newSiteDefaultLang()
@@ -172,11 +174,18 @@ Content Page %02d
// There are no pages to paginate over in the taxonomy terms.
+ // RSS
+ assertFileContent(t, filepath.Join("public", "customrss.xml"), false, "Recent content in Home Sweet Home! on Hugo Rocks", "<rss")
+ assertFileContent(t, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Section1 on Hugo Rocks", "<rss")
+ assertFileContent(t, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Section2 on Hugo Rocks", "<rss")
+ assertFileContent(t, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Taxonomy Hugo on Hugo Rocks", "<rss")
+ assertFileContent(t, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Taxonomy Web on Hugo Rocks", "<rss")
+
}
func TestNodesWithNoContentFile(t *testing.T) {
- //jww.SetStdoutThreshold(jww.LevelDebug)
- jww.SetStdoutThreshold(jww.LevelFatal)
+ jww.SetStdoutThreshold(jww.LevelDebug)
+ //jww.SetStdoutThreshold(jww.LevelFatal)
nodePageFeatureFlag = true
defer toggleNodePageFeatureFlag()
@@ -203,6 +212,7 @@ Content Page %02d
viper.Set("paginate", 1)
viper.Set("title", "Hugo Rocks!")
+ viper.Set("rssURI", "customrss.xml")
s := newSiteDefaultLang()
@@ -234,6 +244,13 @@ Content Page %02d
assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
"Section Title: Sect2s")
+ // RSS
+ assertFileContent(t, filepath.Join("public", "customrss.xml"), false, "Recent content in Hugo Rocks! on Hugo Rocks!", "<rss")
+ assertFileContent(t, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Sect1s on Hugo Rocks!", "<rss")
+ assertFileContent(t, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Sect2s on Hugo Rocks!", "<rss")
+ assertFileContent(t, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Hugo on Hugo Rocks!", "<rss")
+ assertFileContent(t, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Web on Hugo Rocks!", "<rss")
+
}
func writeLayoutsForNodeAsPageTests(t *testing.T) {
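
The tests now set a custom rssURI and assert that the home page, each section, and each taxonomy writes an RSS file containing the expected channel description. A standalone sketch of that kind of check using only the standard library; requireFileContains is an invented stand-in, not the repository's assertFileContent helper:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// requireFileContains reads a generated file and reports the first expected
// substring that is missing, if any.
func requireFileContains(name string, matches ...string) error {
	b, err := os.ReadFile(name)
	if err != nil {
		return err
	}
	content := string(b)
	for _, m := range matches {
		if !strings.Contains(content, m) {
			return fmt.Errorf("%s: missing %q", name, m)
		}
	}
	return nil
}

func main() {
	err := requireFileContains(
		filepath.Join("public", "sect1", "customrss.xml"),
		"Recent content in Section1 on Hugo Rocks", "<rss")
	fmt.Println(err)
}
```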
diff --git a/hugolib/page.go b/hugolib/page.go
index ec728d4c8..79b3da3b0 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -95,6 +95,8 @@ type Page struct {
PageMeta
Source
Position `json:"-"`
+
+ // TODO(bep) np pointer, or remove
Node
GitInfo *gitmap.GitInfo
@@ -496,6 +498,29 @@ func (p *Page) layouts(l ...string) []string {
return layouts(p.Type(), layout)
}
+// TODO(bep) np consolidate and test these NodeType switches
+// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
+// if no RSS should be rendered.
+func (p *Page) rssLayouts() []string {
+ switch p.NodeType {
+ case NodeHome:
+ return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
+ case NodeSection:
+ section := p.sections[0]
+ return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
+ case NodeTaxonomy:
+ singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+ return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
+ case NodeTaxonomyTerms:
+ // No RSS for taxonomy terms
+ case NodePage:
+ // No RSS for regular pages
+ }
+
+ return nil
+
+}
+
func layouts(types string, layout string) (layouts []string) {
t := strings.Split(types, "/")
@@ -1246,7 +1271,7 @@ func (p *Page) prepareData(s *Site) error {
if !ok {
return fmt.Errorf("Data for section %s not found", p.Section())
}
- p.Data["Pages"] = sectionData
+ p.Data["Pages"] = sectionData.Pages()
case NodeTaxonomy:
plural := p.sections[0]
term := p.sections[1]
@@ -1278,7 +1303,7 @@ func (p *Page) prepareData(s *Site) error {
// the paginators etc., we do it manually here.
// TODO(bep) np do better
func (p *Page) copy() *Page {
- c := &Page{Node: Node{NodeType: p.NodeType}}
+ c := &Page{Node: Node{NodeType: p.NodeType, Site: p.Site}}
c.Title = p.Title
c.Data = p.Data
c.Date = p.Date
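
rssLayouts returns an ordered list of candidate templates, most specific first, with the internal default last. A sketch of how such a fallback list resolves to a single template; the firstExisting helper and the available set are illustrations, not Hugo's template lookup:

```go
package main

import "fmt"

// firstExisting returns the first candidate present in the (invented) set of
// available layouts, mirroring how an ordered fallback list resolves.
func firstExisting(available map[string]bool, candidates []string) string {
	for _, c := range candidates {
		if available[c] {
			return c
		}
	}
	return ""
}

func main() {
	// Candidates for a section named "sect1", mirroring the NodeSection case
	// in rssLayouts above.
	candidates := []string{
		"section/sect1.rss.xml",
		"_default/rss.xml",
		"rss.xml",
		"_internal/_default/rss.xml",
	}
	// Pretend only the built-in internal template exists.
	available := map[string]bool{"_internal/_default/rss.xml": true}
	fmt.Println(firstExisting(available, candidates)) // _internal/_default/rss.xml
}
```

Keeping _internal/_default/rss.xml as the final candidate means these node types can still produce RSS even when the site defines no RSS templates of its own.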
diff --git a/hugolib/site.go b/hugolib/site.go
index 6de1daaa9..b1d454b7b 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -2322,6 +2322,7 @@ func (s *Site) layoutExists(layouts ...string) bool {
}
func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error {
+ jww.DEBUG.Printf("Render XML for %q to %q", name, dest)
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")
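
renderAndWriteXML, partially visible above, renders into a pooled buffer that begins with the XML declaration before the result is written out. A simplified, self-contained sketch of that pattern, with bytes.Buffer and text/template standing in for Hugo's buffer pool and template set; the template and destination below are assumptions:

```go
package main

import (
	"bytes"
	"os"
	"text/template"
)

// renderXML renders a template into a buffer that starts with the XML
// declaration, then writes the whole buffer to dest.
func renderXML(dest string, tmpl *template.Template, data interface{}) error {
	var buf bytes.Buffer
	buf.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")
	if err := tmpl.Execute(&buf, data); err != nil {
		return err
	}
	return os.WriteFile(dest, buf.Bytes(), 0644)
}

func main() {
	t := template.Must(template.New("rss").Parse("<rss><title>{{.}}</title></rss>"))
	if err := renderXML("out.xml", t, "Home Sweet Home!"); err != nil {
		panic(err)
	}
}
```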
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index bad831d6c..bf6b78231 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -76,6 +76,10 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
results <- err
}
}
+
+ if err := s.renderRSS(p); err != nil {
+ results <- err
+ }
}
}
@@ -121,3 +125,25 @@ func (s *Site) renderPaginator(p *Page) error {
}
return nil
}
+
+func (s *Site) renderRSS(p *Page) error {
+ layouts := p.rssLayouts()
+
+ if layouts == nil {
+ // No RSS for this NodeType
+ return nil
+ }
+
+ // TODO(bep) np check RSS titles
+ rssNode := p.copy()
+
+ // TODO(bep) np todelido URL
+ rssURI := s.Language.GetString("rssURI")
+ rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI)
+
+ if err := s.renderAndWriteXML(rssNode.Title, rssNode.URLPath.URL, rssNode, s.appendThemeTemplates(layouts)...); err != nil {
+ return err
+ }
+
+ return nil
+}
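
renderRSS skips any page whose rssLayouts is nil, copies the node, and appends the configured rssURI to its URL path before rendering. A sketch summarizing where RSS ends up for each node type under the test configuration; the node list is illustrative, not Hugo's data model:

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	rssURI := "customrss.xml" // as configured in the tests

	// URL paths as set when the node pages are created: "" for home, the
	// section name for sections, plural/key for taxonomies.
	nodes := []struct {
		kind, url string
		hasRSS    bool
	}{
		{"home", "", true},
		{"section", "sect1", true},
		{"taxonomy", "categories/hugo", true},
		{"taxonomy terms", "categories", false}, // rssLayouts returns nil
		{"regular page", "sect1/doc1", false},   // rssLayouts returns nil
	}

	for _, n := range nodes {
		if !n.hasRSS {
			fmt.Printf("%-14s -> no RSS rendered\n", n.kind)
			continue
		}
		fmt.Printf("%-14s -> %s\n", n.kind, path.Join(n.url, rssURI))
	}
}
```

The first three rows match the files asserted in node_as_page_test.go (public/customrss.xml, public/sect1/customrss.xml, public/categories/hugo/customrss.xml); the last two correspond to the NodeTaxonomyTerms and NodePage cases in rssLayouts, which return nil.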