summaryrefslogtreecommitdiffstats
path: root/hugolib/hugo_sites.go
diff options
context:
space:
mode:
authorNico <nico.duesing@secure.mailbox.org>2020-06-20 15:00:25 +0200
committerBjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>2020-06-20 17:13:26 +0200
commit3466884e364d0df39394742697d6fa0b37d49e64 (patch)
treefa76e1990886d69353e0ce4176fc2ebef0dec638 /hugolib/hugo_sites.go
parent6ff435aa3f19a628f26d5ce755238e359eb77df6 (diff)
Create robots.txt in the domain root directory
Previously, a robots.txt was created in every Site. So it appeared in public/robots.txt if there were no languages (which was correct). But if there were multiple languages, one was also created in every language directory (which was wrong). If defaultContentLanguageInSubdir is true, no language is rendered into the root directory, so no robots.txt ended up in the root directory (which was wrong). If multihosts are configured for each language, that is the only case where one robots.txt must be created in each language directory (which was correct). I've changed the behaviour so that the robots.txt is created in the language directories only in the multihost case. In any other case it is created in public/robots.txt. I've also added tests asserting that files are not created in the wrong directories. Fixes #5160 See also #4193
Diffstat (limited to 'hugolib/hugo_sites.go')
-rw-r--r--hugolib/hugo_sites.go35
1 file changed, 34 insertions, 1 deletion
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index e71e48d41..ee0d5c563 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -49,6 +49,7 @@ import (
"github.com/gohugoio/hugo/langs/i18n"
"github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/tpl/tplimpl"
)
@@ -690,7 +691,7 @@ func (cfg *BuildCfg) shouldRender(p *pageState) bool {
return false
}
-func (h *HugoSites) renderCrossSitesArtifacts() error {
+func (h *HugoSites) renderCrossSitesSitemap() error {
if !h.multilingual.enabled() || h.IsMultihost() {
return nil
@@ -716,6 +717,38 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
s.siteCfg.sitemap.Filename, h.toSiteInfos(), templ)
}
+func (h *HugoSites) renderCrossSitesRobotsTXT() error {
+ if h.multihost {
+ return nil
+ }
+ if !h.Cfg.GetBool("enableRobotsTXT") {
+ return nil
+ }
+
+ s := h.Sites[0]
+
+ p, err := newPageStandalone(&pageMeta{
+ s: s,
+ kind: kindRobotsTXT,
+ urlPaths: pagemeta.URLPath{
+ URL: "robots.txt",
+ },
+ },
+ output.RobotsTxtFormat)
+
+ if err != nil {
+ return err
+ }
+
+ if !p.render {
+ return nil
+ }
+
+ templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt")
+
+ return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", "robots.txt", p, templ)
+}
+
func (h *HugoSites) removePageByFilename(filename string) {
h.getContentMaps().withMaps(func(m *pageMap) error {
m.deleteBundleMatching(func(b *contentNode) bool {