diff options
author | Ivan Fraixedes <ivan@fraixed.es> | 2015-12-08 21:13:09 +0000 |
---|---|---|
committer | Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com> | 2016-01-05 23:36:16 +0100 |
commit | 9a6dc6c791f47e1d410956cb16cc42a0117f7246 (patch) | |
tree | 83262271b13d4d0af9b117ac017b99168b6c9709 /hugolib | |
parent | 7c5a1fd16bc76f010d2321371e3c2ef95b53556d (diff) |
Add embedded template for robots.txt
Diffstat (limited to 'hugolib')
-rw-r--r-- | hugolib/robotstxt_test.go | 67 | ||||
-rw-r--r-- | hugolib/site.go | 26 | ||||
-rw-r--r-- | hugolib/sitemap_test.go | 4 |
3 files changed, 97 insertions, 0 deletions
diff --git a/hugolib/robotstxt_test.go b/hugolib/robotstxt_test.go new file mode 100644 index 000000000..c964c0231 --- /dev/null +++ b/hugolib/robotstxt_test.go @@ -0,0 +1,67 @@ +package hugolib + +import ( + "bytes" + "testing" + + "github.com/spf13/afero" + "github.com/spf13/hugo/helpers" + "github.com/spf13/hugo/hugofs" + "github.com/spf13/hugo/source" + "github.com/spf13/viper" +) + +const ROBOTSTXT_TEMPLATE = `User-agent: Googlebot + {{ range .Data.Pages }} + Disallow: {{.RelPermalink}} + {{ end }} +` + +func TestRobotsTXTOutput(t *testing.T) { + viper.Reset() + defer viper.Reset() + + hugofs.DestinationFS = new(afero.MemMapFs) + + viper.Set("baseurl", "http://auth/bub/") + + s := &Site{ + Source: &source.InMemorySource{ByteSource: WEIGHTED_SOURCES}, + } + + s.initializeSiteInfo() + + s.prepTemplates() + s.addTemplate("robots.txt", ROBOTSTXT_TEMPLATE) + + if err := s.CreatePages(); err != nil { + t.Fatalf("Unable to create pages: %s", err) + } + + if err := s.BuildSiteMeta(); err != nil { + t.Fatalf("Unable to build site metadata: %s", err) + } + + if err := s.RenderHomePage(); err != nil { + t.Fatalf("Unable to RenderHomePage: %s", err) + } + + if err := s.RenderSitemap(); err != nil { + t.Fatalf("Unable to RenderSitemap: %s", err) + } + + if err := s.RenderRobotsTXT(); err != nil { + t.Fatalf("Unable to RenderRobotsTXT :%s", err) + } + + robotsFile, err := hugofs.DestinationFS.Open("robots.txt") + + if err != nil { + t.Fatalf("Unable to locate: robots.txt") + } + + robots := helpers.ReaderToBytes(robotsFile) + if !bytes.HasPrefix(robots, []byte("User-agent: Googlebot")) { + t.Errorf("Robots file should start with 'User-agentL Googlebot'. 
%s", robots) + } +} diff --git a/hugolib/site.go b/hugolib/site.go index 648ba6454..957e8ed4c 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -583,6 +583,12 @@ func (s *Site) Render() (err error) { return } s.timerStep("render and write Sitemap") + + if err = s.RenderRobotsTXT(); err != nil { + return + } + s.timerStep("render and write robots.txt") + return } @@ -1561,6 +1567,26 @@ func (s *Site) RenderSitemap() error { return nil } +func (s *Site) RenderRobotsTXT() error { + if viper.GetBool("DisableRobotsTXT") { + return nil + } + + n := s.NewNode() + n.Data["Pages"] = s.Pages + + rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"} + outBuffer := bp.GetBuffer() + defer bp.PutBuffer(outBuffer) + err := s.render("robots", n, outBuffer, s.appendThemeTemplates(rLayouts)...) + + if err == nil { + err = s.WriteDestFile("robots.txt", outBuffer) + } + + return err +} + func (s *Site) Stats() { jww.FEEDBACK.Println(s.draftStats()) jww.FEEDBACK.Println(s.futureStats()) diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go index 7bc16b482..13bc92a18 100644 --- a/hugolib/sitemap_test.go +++ b/hugolib/sitemap_test.go @@ -68,6 +68,10 @@ func TestSitemapOutput(t *testing.T) { t.Fatalf("Unable to RenderSitemap: %s", err) } + if err := s.RenderRobotsTXT(); err != nil { + t.Fatalf("Unable to RenderRobotsTXT :%s", err) + } + sitemapFile, err := hugofs.DestinationFS.Open("sitemap.xml") if err != nil { |