author     Anthony Fok <foka@debian.org>          2015-03-17 22:16:54 -0700
committer  bep <bjorn.erik.pedersen@gmail.com>    2015-03-18 11:30:37 +0100
commit     8b8fb417ae065954a056ea018bd092748cc9127c (patch)
tree       a9160d1f6acc218bcb8728d729deaf1b532b56b3 /hugolib
parent     ca69cad8aa5d8eefbce13abd97b2b979339a458c (diff)
More initialism corrections (golint)
Thanks to @bep's new, brilliant helpers.Deprecated() function, the
following functions or variables are transitioned to their new names,
preserving backward compatibility for v0.14 and warning the user of
upcoming obsolescence in v0.15:

 * .Url → .URL (for node, menu and paginator)
 * .Site.BaseUrl → .Site.BaseURL
 * .Site.Indexes → .Site.Taxonomies
 * .Site.Recent → .Site.Pages
 * getJson → getJSON
 * getCsv → getCSV
 * safeHtml → safeHTML
 * safeCss → safeCSS
 * safeUrl → safeURL

Also fix related initialisms in strings and comments.

Continued effort in fixing #959.
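For illustration only (not part of the commit), the backward-compatibility pattern applied throughout the diff below can be sketched as a small standalone Go program. The deprecated() function here is a stand-in for Hugo's helpers.Deprecated, which takes the object name, the old accessor and the new one and logs a warning; the exact wording of the message is an assumption.

package main

import "log"

// deprecated is a hypothetical stand-in for helpers.Deprecated; it only
// logs a warning naming the old and the new identifier.
func deprecated(object, oldName, newName string) {
	log.Printf("%s%s is deprecated and will be removed in Hugo 0.15. Use %s%s instead.", object, oldName, object, newName)
}

// MenuEntry mirrors the renamed field from this commit: Url became URL.
type MenuEntry struct {
	URL  string
	Name string
}

// Url is the backward-compatible accessor: it warns, then delegates to the
// renamed field, so existing v0.14 templates keep working.
func (me *MenuEntry) Url() string {
	deprecated("MenuEntry", ".Url", ".URL")
	return me.URL
}

func main() {
	me := &MenuEntry{URL: "/docs/", Name: "Docs"}
	log.Println(me.Url()) // old name still resolves, but emits a deprecation warning
}

Templates that still reference the old names (for example .Site.BaseUrl or .Url) therefore continue to render in v0.14 while logging a warning, and should be switched to the new names before v0.15.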
Diffstat (limited to 'hugolib')
-rw-r--r--  hugolib/menu.go                 21
-rw-r--r--  hugolib/menu_test.go            20
-rw-r--r--  hugolib/node.go                 17
-rw-r--r--  hugolib/page.go                 24
-rw-r--r--  hugolib/page_permalink_test.go   8
-rw-r--r--  hugolib/pagination.go           14
-rw-r--r--  hugolib/pagination_test.go      12
-rw-r--r--  hugolib/permalinks.go            4
-rw-r--r--  hugolib/site.go                 62
9 files changed, 108 insertions, 74 deletions
diff --git a/hugolib/menu.go b/hugolib/menu.go
index 4fdf315f0..95370062b 100644
--- a/hugolib/menu.go
+++ b/hugolib/menu.go
@@ -19,10 +19,11 @@ import (
"strings"
"github.com/spf13/cast"
+ "github.com/spf13/hugo/helpers"
)
type MenuEntry struct {
- Url string
+ URL string
Name string
Menu string
Identifier string
@@ -37,6 +38,12 @@ type Menu []*MenuEntry
type Menus map[string]*Menu
type PageMenus map[string]*MenuEntry
+// Url is deprecated. Will be removed in 0.15.
+func (me *MenuEntry) Url() string {
+ helpers.Deprecated("MenuEntry", ".Url", ".URL")
+ return me.URL
+}
+
func (me *MenuEntry) AddChild(child *MenuEntry) {
me.Children = append(me.Children, child)
me.Children.Sort()
@@ -53,22 +60,22 @@ func (me *MenuEntry) KeyName() string {
return me.Name
}
-func (me *MenuEntry) hopefullyUniqueId() string {
+func (me *MenuEntry) hopefullyUniqueID() string {
if me.Identifier != "" {
return me.Identifier
- } else if me.Url != "" {
- return me.Url
+ } else if me.URL != "" {
+ return me.URL
} else {
return me.Name
}
}
func (me *MenuEntry) IsEqual(inme *MenuEntry) bool {
- return me.hopefullyUniqueId() == inme.hopefullyUniqueId() && me.Parent == inme.Parent
+ return me.hopefullyUniqueID() == inme.hopefullyUniqueID() && me.Parent == inme.Parent
}
func (me *MenuEntry) IsSameResource(inme *MenuEntry) bool {
- return me.Url != "" && inme.Url != "" && me.Url == inme.Url
+ return me.URL != "" && inme.URL != "" && me.URL == inme.URL
}
func (me *MenuEntry) MarshallMap(ime map[string]interface{}) {
@@ -76,7 +83,7 @@ func (me *MenuEntry) MarshallMap(ime map[string]interface{}) {
loki := strings.ToLower(k)
switch loki {
case "url":
- me.Url = cast.ToString(v)
+ me.URL = cast.ToString(v)
case "weight":
me.Weight = cast.ToInt(v)
case "name":
diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go
index bea5a84e4..b829dd2ea 100644
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -180,8 +180,8 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou
assert.NotNil(t, me1)
assert.NotNil(t, me2)
- assert.True(t, strings.Contains(me1.Url, "doc1"))
- assert.True(t, strings.Contains(me2.Url, "doc2"))
+ assert.True(t, strings.Contains(me1.URL, "doc1"))
+ assert.True(t, strings.Contains(me2.URL, "doc2"))
}
@@ -216,8 +216,8 @@ func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.Byte
assert.NotNil(t, me1)
assert.NotNil(t, me2)
- assert.True(t, strings.Contains(me1.Url, "doc1"))
- assert.True(t, strings.Contains(me2.Url, "doc2"))
+ assert.True(t, strings.Contains(me1.URL, "doc1"))
+ assert.True(t, strings.Contains(me2.URL, "doc2"))
}
@@ -275,7 +275,7 @@ func TestMenuWithHashInURL(t *testing.T) {
assert.NotNil(t, me)
- assert.Equal(t, "/Zoo/resource/#anchor", me.Url)
+ assert.Equal(t, "/Zoo/resource/#anchor", me.URL)
}
// issue #719
@@ -309,7 +309,7 @@ func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) {
expected = expectedBase + "/"
}
- assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs)
+ assert.Equal(t, expected, unicodeRussian.URL, "uglyURLs[%t]", uglyURLs)
}
func TestTaxonomyNodeMenu(t *testing.T) {
@@ -329,7 +329,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
ts.findTestMenuEntryByID("tax", "2"), true, false},
{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
- &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+ &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
} {
n, _ := ts.site.newTaxonomyNode(this.taxInfo)
@@ -349,7 +349,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
menuEntryXML := ts.findTestMenuEntryByID("tax", "xml")
- if strings.HasSuffix(menuEntryXML.Url, "/") {
+ if strings.HasSuffix(menuEntryXML.URL, "/") {
t.Error("RSS menu item should not be padded with trailing slash")
}
}
@@ -359,7 +359,7 @@ func TestHomeNodeMenu(t *testing.T) {
defer resetMenuTestState(ts)
home := ts.site.newHomeNode()
- homeMenuEntry := &MenuEntry{Name: home.Title, Url: home.Url}
+ homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL}
for i, this := range []struct {
menu string
@@ -369,7 +369,7 @@ func TestHomeNodeMenu(t *testing.T) {
}{
{"main", homeMenuEntry, true, false},
{"doesnotexist", homeMenuEntry, false, false},
- {"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+ {"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false},
{"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true},
{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false},
diff --git a/hugolib/node.go b/hugolib/node.go
index aa9a46d99..d13a2626e 100644
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -17,6 +17,7 @@ import (
"html/template"
"sync"
"time"
+ "github.com/spf13/hugo/helpers"
)
type Node struct {
@@ -30,7 +31,7 @@ type Node struct {
Params map[string]interface{}
Date time.Time
Sitemap Sitemap
- UrlPath
+ URLPath
paginator *pager
paginatorInit sync.Once
scratch *Scratch
@@ -42,7 +43,7 @@ func (n *Node) Now() time.Time {
func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
if inme.HasChildren() {
- me := MenuEntry{Name: n.Title, Url: n.Url}
+ me := MenuEntry{Name: n.Title, URL: n.URL}
for _, child := range inme.Children {
if me.IsSameResource(child) {
@@ -56,7 +57,7 @@ func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
- me := MenuEntry{Name: n.Title, Url: n.Url}
+ me := MenuEntry{Name: n.Title, URL: n.URL}
if !me.IsSameResource(inme) {
return false
}
@@ -119,13 +120,19 @@ func (n *Node) RelRef(ref string) (string, error) {
return n.Site.RelRef(ref, nil)
}
-type UrlPath struct {
- Url string
+type URLPath struct {
+ URL string
Permalink template.HTML
Slug string
Section string
}
+// Url is deprecated. Will be removed in 0.15.
+func (n *Node) Url() string {
+ helpers.Deprecated("Node", ".Url", ".URL")
+ return n.URL
+}
+
// Scratch returns the writable context associated with this Node.
func (n *Node) Scratch() *Scratch {
if n.scratch == nil {
diff --git a/hugolib/page.go b/hugolib/page.go
index 9aa301037..76ac4f51b 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -341,10 +341,10 @@ func (p *Page) analyzePage() {
}
func (p *Page) permalink() (*url.URL, error) {
- baseURL := string(p.Site.BaseUrl)
+ baseURL := string(p.Site.BaseURL)
dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
pSlug := strings.TrimSpace(p.Slug)
- pURL := strings.TrimSpace(p.Url)
+ pURL := strings.TrimSpace(p.URL)
var permalink string
var err error
@@ -420,9 +420,9 @@ func (p *Page) RelPermalink() (string, error) {
}
if viper.GetBool("CanonifyURLs") {
- // replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on
- // have to return the Url relative from baseUrl
- relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl))
+ // replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on
+ // have to return the URL relative from baseURL
+ relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL))
if err != nil {
return "", err
}
@@ -455,9 +455,9 @@ func (p *Page) update(f interface{}) error {
p.Slug = helpers.URLize(cast.ToString(v))
case "url":
if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
- return fmt.Errorf("Only relative urls are supported, %v provided", url)
+ return fmt.Errorf("Only relative URLs are supported, %v provided", url)
}
- p.Url = helpers.URLize(cast.ToString(v))
+ p.URL = helpers.URLize(cast.ToString(v))
case "type":
p.contentType = cast.ToString(v)
case "extension", "ext":
@@ -588,7 +588,7 @@ func (p *Page) Menus() PageMenus {
if ms, ok := p.Params["menu"]; ok {
link, _ := p.RelPermalink()
- me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, Url: link}
+ me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link}
// Could be the name of the menu to attach it to
mname, err := cast.ToStringE(ms)
@@ -618,7 +618,7 @@ func (p *Page) Menus() PageMenus {
}
for name, menu := range menus {
- menuEntry := MenuEntry{Name: p.LinkTitle(), Url: link, Weight: p.Weight, Menu: name}
+ menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
jww.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)
ime, err := cast.ToStringMapE(menu)
@@ -785,9 +785,9 @@ func (p *Page) FullFilePath() string {
func (p *Page) TargetPath() (outfile string) {
- // Always use Url if it's specified
- if len(strings.TrimSpace(p.Url)) > 2 {
- outfile = strings.TrimSpace(p.Url)
+ // Always use URL if it's specified
+ if len(strings.TrimSpace(p.URL)) > 2 {
+ outfile = strings.TrimSpace(p.URL)
if strings.HasSuffix(outfile, "/") {
outfile = outfile + "index.html"
diff --git a/hugolib/page_permalink_test.go b/hugolib/page_permalink_test.go
index dc4dc8371..1ed232348 100644
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -35,7 +35,7 @@ func TestPermalink(t *testing.T) {
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
- // test url overrides
+ // test URL overrides
{"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
}
@@ -46,12 +46,12 @@ func TestPermalink(t *testing.T) {
viper.Set("canonifyurls", test.canonifyURLs)
p := &Page{
Node: Node{
- UrlPath: UrlPath{
+ URLPath: URLPath{
Section: "z",
- Url: test.url,
+ URL: test.url,
},
Site: &SiteInfo{
- BaseUrl: test.base,
+ BaseURL: test.base,
},
},
Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))},
diff --git a/hugolib/pagination.go b/hugolib/pagination.go
index b56e64f7f..5401aaa66 100644
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -47,11 +47,17 @@ func (p *pager) PageNumber() int {
return p.number
}
-// Url returns the url to the current page.
-func (p *pager) Url() template.HTML {
+// URL returns the URL to the current page.
+func (p *pager) URL() template.HTML {
return template.HTML(p.paginationURLFactory(p.PageNumber()))
}
+// Url is deprecated. Will be removed in 0.15.
+func (p *pager) Url() template.HTML {
+ helpers.Deprecated("Paginator", ".Url", ".URL")
+ return p.URL()
+}
+
// Pages returns the elements on this page.
func (p *pager) Pages() Pages {
if len(p.paginatedPages) == 0 {
@@ -142,7 +148,7 @@ func (n *Node) Paginator() (*pager, error) {
return
}
- pagers, err := paginatePages(n.Data["Pages"], n.Url)
+ pagers, err := paginatePages(n.Data["Pages"], n.URL)
if err != nil {
initError = err
@@ -184,7 +190,7 @@ func (n *Node) Paginate(seq interface{}) (*pager, error) {
if n.paginator != nil {
return
}
- pagers, err := paginatePages(seq, n.Url)
+ pagers, err := paginatePages(seq, n.URL)
if err != nil {
initError = err
diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go
index 7d3b2ae72..b41cf4c35 100644
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -43,7 +43,7 @@ func TestPager(t *testing.T) {
assert.Equal(t, 5, paginator.TotalPages())
first := paginatorPages[0]
- assert.Equal(t, "page/1/", first.Url())
+ assert.Equal(t, "page/1/", first.URL())
assert.Equal(t, first, first.First())
assert.True(t, first.HasNext())
assert.Equal(t, paginatorPages[1], first.Next())
@@ -58,7 +58,7 @@ func TestPager(t *testing.T) {
assert.Equal(t, paginatorPages[1], third.Prev())
last := paginatorPages[4]
- assert.Equal(t, "page/5/", last.Url())
+ assert.Equal(t, "page/5/", last.URL())
assert.Equal(t, last, last.Last())
assert.False(t, last.HasNext())
assert.Nil(t, last.Next())
@@ -97,7 +97,7 @@ func TestPagerNoPages(t *testing.T) {
}
-func TestPaginationUrlFactory(t *testing.T) {
+func TestPaginationURLFactory(t *testing.T) {
viper.Set("PaginatePath", "zoo")
unicode := newPaginationURLFactory("новости проекта")
fooBar := newPaginationURLFactory("foo", "bar")
@@ -197,12 +197,12 @@ func createTestPages(num int) Pages {
for i := 0; i < num; i++ {
pages[i] = &Page{
Node: Node{
- UrlPath: UrlPath{
+ URLPath: URLPath{
Section: "z",
- Url: fmt.Sprintf("http://base/x/y/p%d.html", num),
+ URL: fmt.Sprintf("http://base/x/y/p%d.html", num),
},
Site: &SiteInfo{
- BaseUrl: "http://base/",
+ BaseURL: "http://base/",
},
},
Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", num)))},
diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go
index fdae2b856..325fbfe4b 100644
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -138,7 +138,7 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) {
// pageToPermalinkTitle returns the URL-safe form of the title
func pageToPermalinkTitle(p *Page, _ string) (string, error) {
// Page contains Node which has Title
- // (also contains UrlPath which has Slug, sometimes)
+ // (also contains URLPath which has Slug, sometimes)
return helpers.URLize(p.Title), nil
}
@@ -166,7 +166,7 @@ func pageToPermalinkSlugElseTitle(p *Page, a string) (string, error) {
}
func pageToPermalinkSection(p *Page, _ string) (string, error) {
- // Page contains Node contains UrlPath which has Section
+ // Page contains Node contains URLPath which has Section
return p.Section(), nil
}
diff --git a/hugolib/site.go b/hugolib/site.go
index 39fe888b5..e1f0fe991 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -93,15 +93,13 @@ type targetList struct {
}
type SiteInfo struct {
- BaseUrl template.URL
+ BaseURL template.URL
Taxonomies TaxonomyList
Authors AuthorList
Social SiteSocial
- Indexes *TaxonomyList // legacy, should be identical to Taxonomies
Sections Taxonomy
Pages *Pages
Files []*source.File
- Recent *Pages // legacy, should be identical to Pages
Menus *Menus
Hugo *HugoInfo
Title string
@@ -133,6 +131,24 @@ type SiteInfo struct {
// linkedin
type SiteSocial map[string]string
+// BaseUrl is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) BaseUrl() template.URL {
+ helpers.Deprecated("Site", ".BaseUrl", ".BaseURL")
+ return s.BaseURL
+}
+
+// Recent is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Recent() *Pages {
+ helpers.Deprecated("Site", ".Recent", ".Pages")
+ return s.Pages
+}
+
+// Indexes is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Indexes() *TaxonomyList {
+ helpers.Deprecated("Site", ".Indexes", ".Taxonomies")
+ return &s.Taxonomies
+}
+
func (s *SiteInfo) GetParam(key string) interface{} {
v := s.Params[strings.ToLower(key)]
@@ -445,7 +461,7 @@ func (s *Site) initializeSiteInfo() {
}
s.Info = SiteInfo{
- BaseUrl: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
+ BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
Title: viper.GetString("Title"),
Author: viper.GetStringMap("author"),
LanguageCode: viper.GetString("languagecode"),
@@ -454,7 +470,6 @@ func (s *Site) initializeSiteInfo() {
BuildDrafts: viper.GetBool("BuildDrafts"),
canonifyURLs: viper.GetBool("CanonifyURLs"),
Pages: &s.Pages,
- Recent: &s.Pages,
Menus: &s.Menus,
Params: params,
Permalinks: permalinks,
@@ -705,14 +720,14 @@ func (s *Site) getMenusFromConfig() Menus {
menuEntry.MarshallMap(ime)
- if strings.HasPrefix(menuEntry.Url, "/") {
+ if strings.HasPrefix(menuEntry.URL, "/") {
// make it match the nodes
- menuEntryURL := menuEntry.Url
+ menuEntryURL := menuEntry.URL
menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
if !s.Info.canonifyURLs {
- menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
+ menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseURL), menuEntryURL)
}
- menuEntry.Url = menuEntryURL
+ menuEntry.URL = menuEntryURL
}
if ret[name] == nil {
@@ -764,8 +779,8 @@ func (s *Site) assembleMenus() {
for p, childmenu := range children {
_, ok := flat[twoD{p.MenuName, p.EntryName}]
if !ok {
- // if parent does not exist, create one without a url
- flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""}
+ // if parent does not exist, create one without a URL
+ flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
}
flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
}
@@ -818,7 +833,6 @@ func (s *Site) assembleTaxonomies() {
}
s.Info.Taxonomies = s.Taxonomies
- s.Info.Indexes = &s.Taxonomies
s.Info.Sections = s.Sections
}
@@ -1021,7 +1035,7 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
base := t.plural + "/" + t.key
n := s.NewNode()
n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
- s.setUrls(n, base)
+ s.setURLs(n, base)
if len(t.pages) > 0 {
n.Date = t.pages[0].Page.Date
}
@@ -1081,7 +1095,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
if !viper.GetBool("DisableRSS") {
// XML Feed
- n.Url = s.permalinkStr(base + "/index.xml")
+ n.URL = s.permalinkStr(base + "/index.xml")
n.Permalink = s.permalink(base)
rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
@@ -1099,7 +1113,7 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
for singular, plural := range taxonomies {
n := s.NewNode()
n.Title = strings.Title(plural)
- s.setUrls(n, plural)
+ s.setURLs(n, plural)
n.Data["Singular"] = singular
n.Data["Plural"] = plural
n.Data["Terms"] = s.Taxonomies[plural]
@@ -1125,7 +1139,7 @@ func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
} else {
n.Title = strings.Title(section)
}
- s.setUrls(n, section)
+ s.setURLs(n, section)
n.Date = data[0].Page.Date
n.Data["Pages"] = data.Pages()
@@ -1175,7 +1189,7 @@ func (s *Site) RenderSectionLists() error {
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
- n.Url = s.permalinkStr(section + "/index.xml")
+ n.URL = s.permalinkStr(section + "/index.xml")
n.Permalink = s.permalink(section)
rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
@@ -1189,7 +1203,7 @@ func (s *Site) RenderSectionLists() error {
func (s *Site) newHomeNode() *Node {
n := s.NewNode()
n.Title = n.Site.Title
- s.setUrls(n, "/")
+ s.setURLs(n, "/")
n.Data["Pages"] = s.Pages
return n
}
@@ -1232,7 +1246,7 @@ func (s *Site) RenderHomePage() error {
if !viper.GetBool("DisableRSS") {
// XML Feed
- n.Url = s.permalinkStr("index.xml")
+ n.URL = s.permalinkStr("index.xml")
n.Title = ""
high := 50
if len(s.Pages) < high {
@@ -1250,7 +1264,7 @@ func (s *Site) RenderHomePage() error {
}
}
- n.Url = helpers.URLize("404.html")
+ n.URL = helpers.URLize("404.html")
n.Title = "404 Page not found"
n.Permalink = s.permalink("404.html")
@@ -1277,7 +1291,7 @@ func (s *Site) RenderSitemap() error {
page := &Page{}
page.Date = s.Info.LastChange
page.Site = &s.Info
- page.Url = "/"
+ page.URL = "/"
pages = append(pages, page)
pages = append(pages, s.Pages...)
@@ -1315,9 +1329,9 @@ func (s *Site) Stats() {
}
}
-func (s *Site) setUrls(n *Node, in string) {
- n.Url = helpers.URLizeAndPrep(in)
- n.Permalink = s.permalink(n.Url)
+func (s *Site) setURLs(n *Node, in string) {
+ n.URL = helpers.URLizeAndPrep(in)
+ n.Permalink = s.permalink(n.URL)
n.RSSLink = s.permalink(in + ".xml")
}