From 597e418cb02883418f2cebb41400e8e61413f651 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?=
Date: Wed, 2 Jan 2019 12:33:26 +0100
Subject: Make Page an interface

The main motivation of this commit is to add a `page.Page` interface to
replace the very file-oriented `hugolib.Page` struct. This is all a
preparation step for issue #5074, "pages from other data sources". But this
also fixes a set of annoying limitations, especially related to custom
output formats and shortcodes.

Most notable changes:

* The inner content of shortcodes using `{{%` as the outermost delimiter
  will now be sent to the content renderer, e.g. Blackfriday. This means
  that any markdown will partake in the global ToC and footnote context etc.
* The Custom Output formats are now "fully virtualized". This removes many
  of the current limitations.
* The taxonomy list type now has a reference to the `Page` object. This
  improves the taxonomy template `.Title` situation and makes common
  template constructs much simpler.

See #5074

Fixes #5763
Fixes #5758
Fixes #5090
Fixes #5204
Fixes #4695
Fixes #5607
Fixes #5707
Fixes #5719
Fixes #3113
Fixes #5706
Fixes #5767
Fixes #5723
Fixes #5769
Fixes #5770
Fixes #5771
Fixes #5759
Fixes #5776
Fixes #5777
Fixes #5778
---
 hugolib/alias.go | 29 +-
 hugolib/alias_test.go | 4 +-
 hugolib/author.go | 45 -
 hugolib/collections.go | 75 +-
 hugolib/collections_test.go | 18 +-
 hugolib/config.go | 23 +-
 hugolib/datafiles_test.go | 6 +-
 hugolib/disableKinds_test.go | 40 +-
 hugolib/embedded_shortcodes_test.go | 10 +-
 hugolib/gitinfo.go | 14 +-
 hugolib/hugo_sites.go | 523 ++++---
 hugolib/hugo_sites_build.go | 175 ++-
 hugolib/hugo_sites_build_errors_test.go | 123 +-
 hugolib/hugo_sites_build_test.go | 509 +++---
 hugolib/hugo_sites_multihost_test.go | 26 +-
 hugolib/hugo_smoke_test.go | 303 ++++
 hugolib/language_content_dir_test.go | 78 +-
 hugolib/media.go | 60 -
 hugolib/menu.go | 224 ---
 hugolib/menu_test.go | 6 +-
 hugolib/minify_publisher_test.go | 20 +-
 hugolib/multilingual.go | 16 +-
 hugolib/orderedMap.go | 99 --
 hugolib/orderedMap_test.go | 69 -
 hugolib/page.go | 2400 +++++++----------------
 hugolib/pageCache.go | 136 --
 hugolib/pageCache_test.go | 88 --
 hugolib/pageGroup.go | 298 ----
 hugolib/pageGroup_test.go | 457 ------
 hugolib/pageSort.go | 332 ----
 hugolib/pageSort_test.go | 281 ----
 hugolib/page__common.go | 112 ++
 hugolib/page__content.go | 135 ++
 hugolib/page__data.go | 70 +
 hugolib/page__menus.go | 74 +
 hugolib/page__meta.go | 652 ++++++++
 hugolib/page__new.go | 291 ++++
 hugolib/page__output.go | 107 ++
 hugolib/page__paginator.go | 83 +
 hugolib/page__paths.go | 148 ++
 hugolib/page__per_output.go | 445 ++++++
 hugolib/page__position.go | 76 +
 hugolib/page__ref.go | 117 ++
 hugolib/page__tree.go | 113 ++
 hugolib/page_content.go | 233 ---
 hugolib/page_errors.go | 47 -
 hugolib/page_kinds.go | 40 +
 hugolib/page_output.go | 320 ----
 hugolib/page_paths.go | 312 ----
 hugolib/page_paths_test.go | 194 ---
 hugolib/page_permalink_test.go | 44 +-
 hugolib/page_ref.go | 100 --
 hugolib/page_resource.go | 23 -
 hugolib/page_taxonomy_test.go | 96 --
 hugolib/page_test.go | 901 +++-------
 hugolib/page_time_integration_test.go | 183 ---
 hugolib/page_unwrap.go | 50 +
 hugolib/page_unwrap_test.go | 37 +
 hugolib/page_without_content.go | 67 -
 hugolib/pagebundler.go | 42 +-
 hugolib/pagebundler_capture.go | 10 +-
 hugolib/pagebundler_capture_test.go | 12 +-
 hugolib/pagebundler_handlers.go | 141 +-
 hugolib/pagebundler_test.go | 308 ++--
 hugolib/pagecollections.go | 279 ++--
 hugolib/pagecollections_test.go | 86 +-
 hugolib/pagemeta/page_frontmatter.go | 426 -----
 hugolib/pagemeta/page_frontmatter_test.go | 261 ----
 hugolib/pagemeta/pagemeta.go | 32 -
 hugolib/pagesPrevNext.go | 42 -
 hugolib/pagesPrevNext_test.go | 86 --
 hugolib/pages_language_merge.go | 64 -
 hugolib/pages_language_merge_test.go | 40 +-
 hugolib/pages_related.go | 191 ---
 hugolib/pages_related_test.go | 75 -
 hugolib/pagination.go | 595 -------
 hugolib/pagination_test.go | 579 -------
 hugolib/path_separators_test.go | 38 -
 hugolib/paths/themes.go | 4 +-
 hugolib/permalinker.go | 5 +-
 hugolib/permalinks.go | 213 ---
 hugolib/permalinks_test.go | 85 -
 hugolib/resource_chain_test.go | 8 +-
 hugolib/rss_test.go | 25 +-
 hugolib/shortcode.go | 490 ++----
 hugolib/shortcode_page.go | 56 +
 hugolib/shortcode_test.go | 400 ++---
 hugolib/site.go | 1132 +++++++-------
 hugolib/siteJSONEncode_test.go | 34 +-
 hugolib/site_output.go | 15 +-
 hugolib/site_output_test.go | 42 +-
 hugolib/site_render.go | 444 +++---
 hugolib/site_sections.go | 273 +---
 hugolib/site_sections_test.go | 150 +-
 hugolib/site_test.go | 123 +-
 hugolib/site_url_test.go | 14 +-
 hugolib/sitemap.go | 45 -
 hugolib/sitemap_test.go | 10 +-
 hugolib/taxonomy.go | 151 +-
 hugolib/taxonomy_test.go | 161 +-
 hugolib/testhelpers_test.go | 123 +-
 hugolib/translations.go | 56 +-
 102 files changed, 6820 insertions(+), 12103 deletions(-)
 delete mode 100644 hugolib/author.go
 create mode 100644 hugolib/hugo_smoke_test.go
 delete mode 100644 hugolib/media.go
 delete mode 100644 hugolib/menu.go
 delete mode 100644 hugolib/orderedMap.go
 delete mode 100644 hugolib/orderedMap_test.go
 delete mode 100644 hugolib/pageCache.go
 delete mode 100644 hugolib/pageCache_test.go
 delete mode 100644 hugolib/pageGroup.go
 delete mode 100644 hugolib/pageGroup_test.go
 delete mode 100644 hugolib/pageSort.go
 delete mode 100644 hugolib/pageSort_test.go
 create mode 100644 hugolib/page__common.go
 create mode 100644 hugolib/page__content.go
 create mode 100644 hugolib/page__data.go
 create mode 100644 hugolib/page__menus.go
 create mode 100644 hugolib/page__meta.go
 create mode 100644 hugolib/page__new.go
 create mode 100644 hugolib/page__output.go
 create mode 100644 hugolib/page__paginator.go
 create mode 100644 hugolib/page__paths.go
 create mode 100644 hugolib/page__per_output.go
 create mode 100644 hugolib/page__position.go
 create mode 100644 hugolib/page__ref.go
 create mode 100644 hugolib/page__tree.go
 delete mode 100644 hugolib/page_content.go
 delete mode 100644 hugolib/page_errors.go
 create mode 100644 hugolib/page_kinds.go
 delete mode 100644 hugolib/page_output.go
 delete mode 100644 hugolib/page_paths.go
 delete mode 100644 hugolib/page_paths_test.go
 delete mode 100644 hugolib/page_ref.go
 delete mode 100644 hugolib/page_resource.go
 delete mode 100644 hugolib/page_taxonomy_test.go
 delete mode 100644 hugolib/page_time_integration_test.go
 create mode 100644 hugolib/page_unwrap.go
 create mode 100644 hugolib/page_unwrap_test.go
 delete mode 100644 hugolib/page_without_content.go
 delete mode 100644 hugolib/pagemeta/page_frontmatter.go
 delete mode 100644 hugolib/pagemeta/page_frontmatter_test.go
 delete mode 100644 hugolib/pagemeta/pagemeta.go
 delete mode 100644 hugolib/pagesPrevNext.go
 delete mode 100644 hugolib/pagesPrevNext_test.go
 delete mode 100644 hugolib/pages_language_merge.go
 delete mode 100644 hugolib/pages_related.go
 delete mode 100644 hugolib/pages_related_test.go
 delete mode 100644 hugolib/pagination.go
 delete mode 100644 hugolib/pagination_test.go
 delete mode 100644
hugolib/path_separators_test.go delete mode 100644 hugolib/permalinks.go delete mode 100644 hugolib/permalinks_test.go create mode 100644 hugolib/shortcode_page.go delete mode 100644 hugolib/sitemap.go (limited to 'hugolib') diff --git a/hugolib/alias.go b/hugolib/alias.go index c44f32dbb..599821c0a 100644 --- a/hugolib/alias.go +++ b/hugolib/alias.go @@ -1,4 +1,4 @@ -// Copyright 2017 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ import ( "github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/publisher" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/helpers" @@ -55,7 +56,12 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al return aliasHandler{t, l, allowRoot} } -func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) { +type aliasPage struct { + Permalink string + page.Page +} + +func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) { t := "alias" if isXHTML { t = "alias-xhtml" @@ -75,12 +81,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i } } - data := struct { - Permalink string - Page *Page - }{ + data := aliasPage{ permalink, - page, + p, } buffer := new(bytes.Buffer) @@ -91,11 +94,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i return buffer, nil } -func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) { return s.publishDestAlias(false, path, permalink, outputFormat, p) } -func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) { +func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) { handler := newAliasHandler(s.Tmpl, s.Log, allowRoot) isXHTML := strings.HasSuffix(path, ".xhtml") @@ -126,19 +129,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo func (a aliasHandler) targetPathAlias(src string) (string, error) { originalAlias := src if len(src) <= 0 { - return "", fmt.Errorf("Alias \"\" is an empty string") + return "", fmt.Errorf("alias \"\" is an empty string") } alias := filepath.Clean(src) components := strings.Split(alias, helpers.FilePathSeparator) if !a.allowRoot && alias == helpers.FilePathSeparator { - return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias) } // Validate against directory traversal if components[0] == ".." 
{ - return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias) + return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias) } // Handle Windows file and directory naming restrictions @@ -171,7 +174,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) { for _, m := range msgs { a.log.ERROR.Println(m) } - return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias) + return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias) } for _, m := range msgs { a.log.INFO.Println(m) diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go index da1b80b70..684e35c9a 100644 --- a/hugolib/alias_test.go +++ b/hugolib/alias_test.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -50,7 +50,7 @@ func TestAlias(t *testing.T) { b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 1) + require.Len(t, b.H.Sites[0].RegularPages(), 1) // the real page b.AssertFileContent("public/page/index.html", "For some moments the old man") diff --git a/hugolib/author.go b/hugolib/author.go deleted file mode 100644 index 0f4327097..000000000 --- a/hugolib/author.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -// AuthorList is a list of all authors and their metadata. -type AuthorList map[string]Author - -// Author contains details about the author of a page. -type Author struct { - GivenName string - FamilyName string - DisplayName string - Thumbnail string - Image string - ShortBio string - LongBio string - Email string - Social AuthorSocial -} - -// AuthorSocial is a place to put social details per author. These are the -// standard keys that themes will expect to have available, but can be -// expanded to any others on a per site basis -// - website -// - github -// - facebook -// - twitter -// - googleplus -// - pinterest -// - instagram -// - youtube -// - linkedin -// - skype -type AuthorSocial map[string]string diff --git a/hugolib/collections.go b/hugolib/collections.go index cf75d3732..a794a9866 100644 --- a/hugolib/collections.go +++ b/hugolib/collections.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -14,19 +14,13 @@ package hugolib import ( - "fmt" - - "github.com/gohugoio/hugo/resources/resource" - "github.com/gohugoio/hugo/common/collections" + "github.com/gohugoio/hugo/resources/page" ) var ( - _ collections.Grouper = (*Page)(nil) - _ collections.Slicer = (*Page)(nil) - _ collections.Slicer = PageGroup{} - _ collections.Slicer = WeightedPage{} - _ resource.ResourcesConverter = Pages{} + _ collections.Grouper = (*pageState)(nil) + _ collections.Slicer = (*pageState)(nil) ) // collections.Slicer implementations below. We keep these bridge implementations @@ -35,50 +29,8 @@ var ( // Slice is not meant to be used externally. It's a bridge function // for the template functions. See collections.Slice. -func (p *Page) Slice(items interface{}) (interface{}, error) { - return toPages(items) -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p PageGroup) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case PageGroup: - return items, nil - case []interface{}: - groups := make(PagesGroup, len(items)) - for i, v := range items { - g, ok := v.(PageGroup) - if !ok { - return nil, fmt.Errorf("type %T is not a PageGroup", v) - } - groups[i] = g - } - return groups, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } -} - -// Slice is not meant to be used externally. It's a bridge function -// for the template functions. See collections.Slice. -func (p WeightedPage) Slice(in interface{}) (interface{}, error) { - switch items := in.(type) { - case WeightedPages: - return items, nil - case []interface{}: - weighted := make(WeightedPages, len(items)) - for i, v := range items { - g, ok := v.(WeightedPage) - if !ok { - return nil, fmt.Errorf("type %T is not a WeightedPage", v) - } - weighted[i] = g - } - return weighted, nil - default: - return nil, fmt.Errorf("invalid slice type %T", items) - } +func (p *pageState) Slice(items interface{}) (interface{}, error) { + return page.ToPages(items) } // collections.Grouper implementations below @@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) { // Group creates a PageGroup from a key and a Pages object // This method is not meant for external use. It got its non-typed arguments to satisfy // a very generic interface in the tpl package. -func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) { - pages, err := toPages(in) +func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) { + pages, err := page.ToPages(in) if err != nil { return nil, err } - return PageGroup{Key: key, Pages: pages}, nil -} - -// ToResources wraps resource.ResourcesConverter -func (pages Pages) ToResources() resource.Resources { - r := make(resource.Resources, len(pages)) - for i, p := range pages { - r[i] = p - } - return r + return page.PageGroup{Key: key, Pages: pages}, nil } diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go index 9cf328a05..bc55bdbe8 100644 --- a/hugolib/collections_test.go +++ b/hugolib/collections_test.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -40,7 +40,7 @@ title: "Page" b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", "cool: 2") } @@ -79,12 +79,12 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)", - "pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)", - `weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) + "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)", + "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)", + `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`) } func TestAppendFunc(t *testing.T) { @@ -129,11 +129,11 @@ tags_weight: %d b.CreateSites().Build(BuildCfg{}) assert.Equal(1, len(b.H.Sites)) - require.Len(t, b.H.Sites[0].RegularPages, 2) + require.Len(t, b.H.Sites[0].RegularPages(), 2) b.AssertFileContent("public/index.html", - "pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)", - "appendPages:9:hugolib.Pages:home/page", + "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)", + "appendPages:9:page.Pages:home/page", "appendStrings:[]string:[a b c d e]", "appendStringsSlice:[]string:[a b c c d]", "union:[]string:[a b c d e]", diff --git a/hugolib/config.go b/hugolib/config.go index 6a1de32be..50e4ca6ec 100644 --- a/hugolib/config.go +++ b/hugolib/config.go @@ -1,4 +1,4 @@ -// Copyright 2016-present The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -24,7 +24,6 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/hugo" - "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugolib/paths" "github.com/pkg/errors" _errors "github.com/pkg/errors" @@ -177,14 +176,6 @@ type configLoader struct { ConfigSourceDescriptor } -func (l configLoader) wrapFileInfoError(err error, fi os.FileInfo) error { - rfi, ok := fi.(hugofs.RealFilenameInfo) - if !ok { - return err - } - return l.wrapFileError(err, rfi.RealFilename()) -} - func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) { baseDir := l.configFileDir() var baseFilename string @@ -240,11 +231,6 @@ func (l configLoader) wrapFileError(err error, filename string) error { return err } -func (l configLoader) newRealBaseFs(path string) afero.Fs { - return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs)) - -} - func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) { sourceFs := l.Fs configDir := l.AbsConfigDir @@ -274,7 +260,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) for _, configDir := range configDirs { err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error { - if fi == nil { + if fi == nil || err != nil { return nil } @@ -616,8 +602,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("removePathAccents", false) v.SetDefault("titleCaseStyle", "AP") v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"}) - v.SetDefault("permalinks", make(PermalinkOverrides, 0)) - v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"}) + v.SetDefault("permalinks", make(map[string]string)) + v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"}) v.SetDefault("pygmentsStyle", "monokai") v.SetDefault("pygmentsUseClasses", false) v.SetDefault("pygmentsCodeFences", false) @@ -625,7 +611,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error { v.SetDefault("pygmentsOptions", "") v.SetDefault("disableLiveReload", false) v.SetDefault("pluralizeListTitles", true) - v.SetDefault("preserveTaxonomyNames", false) v.SetDefault("forceSyncStatic", false) v.SetDefault("footnoteAnchorPrefix", "") v.SetDefault("footnoteReturnLinkContents", "") diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go index 6685de4cc..b65183a8a 100644 --- a/hugolib/datafiles_test.go +++ b/hugolib/datafiles_test.go @@ -1,4 +1,4 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true}) - if !expectBuildError && !reflect.DeepEqual(expected, s.Data) { + if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) { // This disabled code detects the situation described in the WARNING message below. // The situation seems to only occur for TOML data with integer values. // Perhaps the TOML parser returns ints in another type. 
@@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey } */ - return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data) + return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data()) } return diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go index edada1419..f5c093646 100644 --- a/hugolib/disableKinds_test.go +++ b/hugolib/disableKinds_test.go @@ -1,4 +1,4 @@ -// Copyright 2016 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ import ( "fmt" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/deps" "github.com/spf13/afero" @@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) { func TestDisableKindsSomeDisabled(t *testing.T) { t.Parallel() - doTestDisableKinds(t, KindSection, kind404) + doTestDisableKinds(t, page.KindSection, kind404) } func TestDisableKindsOneDisabled(t *testing.T) { t.Parallel() for _, kind := range allKinds { - if kind == KindPage { + if kind == page.KindPage { // Turning off regular page generation have some side-effects // not handled by the assertions below (no sections), so // skip that for now. @@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) { assertDisabledKind(th, func(isDisabled bool) bool { if isDisabled { - return len(s.RegularPages) == 0 + return len(s.RegularPages()) == 0 } - return len(s.RegularPages) > 0 - }, disabled, KindPage, "public/sect/p1/index.html", "Single|P1") + return len(s.RegularPages()) > 0 + }, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindHome) + p := s.getPage(page.KindHome) if isDisabled { return p == nil } return p != nil - }, disabled, KindHome, "public/index.html", "Home") + }, disabled, page.KindHome, "public/index.html", "Home") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindSection, "sect") + p := s.getPage(page.KindSection, "sect") if isDisabled { return p == nil } return p != nil - }, disabled, KindSection, "public/sect/index.html", "Sects") + }, disabled, page.KindSection, "public/sect/index.html", "Sects") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "tags", "tag1") + p := s.getPage(page.KindTaxonomy, "tags", "tag1") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1") + }, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "tags") + p := s.getPage(page.KindTaxonomyTerm, "tags") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags") + }, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags") assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomyTerm, "categories") + p := s.getPage(page.KindTaxonomyTerm, "categories") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms") + }, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms") 
assertDisabledKind(th, func(isDisabled bool) bool { - p := s.getPage(KindTaxonomy, "categories", "hugo") + p := s.getPage(page.KindTaxonomy, "categories", "hugo") if isDisabled { return p == nil } return p != nil - }, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo") + }, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo") // The below have no page in any collection. assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "") assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap") @@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st if kind == kindRSS && !isDisabled { // If the home page is also disabled, there is not RSS to look for. - if stringSliceContains(KindHome, disabled...) { + if stringSliceContains(page.KindHome, disabled...) { isDisabled = true } } diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go index 3a6220b53..c70380a4b 100644 --- a/hugolib/embedded_shortcodes_test.go +++ b/hugolib/embedded_shortcodes_test.go @@ -1,4 +1,4 @@ -// Copyright 2016 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -20,6 +20,8 @@ import ( "strings" "testing" + "github.com/spf13/cast" + "path/filepath" "github.com/gohugoio/hugo/deps" @@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) - require.Len(t, s.RegularPages, 1) + require.Len(t, s.RegularPages(), 1) - output := string(s.RegularPages[0].content()) + content, err := s.RegularPages()[0].Content() + require.NoError(t, err) + output := cast.ToString(content) if !strings.Contains(output, expected) { t.Errorf("Got\n%q\nExpected\n%q", output, expected) diff --git a/hugolib/gitinfo.go b/hugolib/gitinfo.go index d356fcf07..6acc47d17 100644 --- a/hugolib/gitinfo.go +++ b/hugolib/gitinfo.go @@ -1,4 +1,4 @@ -// Copyright 2016-present The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ import ( "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/gohugoio/hugo/resources/page" ) type gitInfo struct { @@ -26,15 +27,12 @@ type gitInfo struct { repo *gitmap.GitRepo } -func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) { - if g == nil { - return nil, false - } - - name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir) +func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo { + name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir) name = strings.TrimPrefix(name, "/") - return g.repo.Files[name], true + return g.repo.Files[name] + } func newGitInfo(cfg config.Provider) (*gitInfo, error) { diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 9ce1c438e..af1e0fbac 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -1,4 +1,4 @@ -// Copyright 2018 The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,14 +14,24 @@ package hugolib import ( - "errors" "io" "path/filepath" "sort" "strings" "sync" + "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/parser/metadecoders" + + "github.com/gohugoio/hugo/hugofs" + + "github.com/pkg/errors" + + "github.com/gohugoio/hugo/source" + + "github.com/bep/gitmap" "github.com/gohugoio/hugo/config" + "github.com/spf13/afero" "github.com/gohugoio/hugo/publisher" @@ -30,8 +40,10 @@ import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/langs" + "github.com/gohugoio/hugo/lazy" "github.com/gohugoio/hugo/i18n" + "github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" ) @@ -48,17 +60,96 @@ type HugoSites struct { // If this is running in the dev server. running bool + // Render output formats for all sites. + renderFormats output.Formats + *deps.Deps + gitInfo *gitInfo + + // As loaded from the /data dirs + data map[string]interface{} + // Keeps track of bundle directories and symlinks to enable partial rebuilding. ContentChanges *contentChangeMap - // If enabled, keeps a revision map for all content. - gitInfo *gitInfo + init *hugoSitesInit + + *fatalErrorHandler +} + +type fatalErrorHandler struct { + mu sync.Mutex + + h *HugoSites + + err error + + done bool + donec chan bool // will be closed when done +} + +// FatalError error is used in some rare situations where it does not make sense to +// continue processing, to abort as soon as possible and log the error. +func (f *fatalErrorHandler) FatalError(err error) { + f.mu.Lock() + defer f.mu.Unlock() + if !f.done { + f.done = true + close(f.donec) + } + f.err = err } -func (h *HugoSites) siteInfos() SiteInfos { - infos := make(SiteInfos, len(h.Sites)) +func (f *fatalErrorHandler) getErr() error { + f.mu.Lock() + defer f.mu.Unlock() + return f.err +} + +func (f *fatalErrorHandler) Done() <-chan bool { + return f.donec +} + +type hugoSitesInit struct { + // Loads the data from all of the /data folders. + data *lazy.Init + + // Loads the Git info for all the pages if enabled. + gitInfo *lazy.Init + + // Maps page translations. 
+ translations *lazy.Init +} + +func (h *hugoSitesInit) Reset() { + h.data.Reset() + h.gitInfo.Reset() + h.translations.Reset() +} + +func (h *HugoSites) Data() map[string]interface{} { + if _, err := h.init.data.Do(); err != nil { + h.SendError(errors.Wrap(err, "failed to load data")) + return nil + } + return h.data +} + +func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) { + if _, err := h.init.gitInfo.Do(); err != nil { + return nil, err + } + + if h.gitInfo == nil { + return nil, nil + } + + return h.gitInfo.forPage(p), nil +} + +func (h *HugoSites) siteInfos() page.Sites { + infos := make(page.Sites, len(h.Sites)) for i, site := range h.Sites { infos[i] = &site.Info } @@ -106,7 +197,7 @@ func (h *HugoSites) IsMultihost() bool { func (h *HugoSites) LanguageSet() map[string]bool { set := make(map[string]bool) for _, s := range h.Sites { - set[s.Language.Lang] = true + set[s.language.Lang] = true } return set } @@ -129,14 +220,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) { func (h *HugoSites) langSite() map[string]*Site { m := make(map[string]*Site) for _, s := range h.Sites { - m[s.Language.Lang] = s + m[s.language.Lang] = s } return m } // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. -func (h *HugoSites) GetContentPage(filename string) *Page { +func (h *HugoSites) GetContentPage(filename string) page.Page { for _, s := range h.Sites { pos := s.rawAllPages.findPagePosByFilename(filename) if pos == -1 { @@ -178,10 +269,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { running: cfg.Running, multilingual: langConfig, multihost: cfg.Cfg.GetBool("multihost"), - Sites: sites} + Sites: sites, + init: &hugoSitesInit{ + data: lazy.New(), + gitInfo: lazy.New(), + translations: lazy.New(), + }, + } + + h.fatalErrorHandler = &fatalErrorHandler{ + h: h, + donec: make(chan bool), + } + + h.init.data.Add(func() (interface{}, error) { + err := h.loadData(h.PathSpec.BaseFs.Data.Fs) + return err, nil + }) + + h.init.translations.Add(func() (interface{}, error) { + if len(h.Sites) > 1 { + allTranslations := pagesToTranslationsMap(h.Sites) + assignTranslationsToPages(allTranslations, h.Sites) + } + + return nil, nil + }) + + h.init.gitInfo.Add(func() (interface{}, error) { + err := h.loadGitInfo() + return nil, err + }) for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(cfg, sites...); err != nil { @@ -197,14 +318,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { h.ContentChanges = contentChangeTracker } - if err := h.initGitInfo(); err != nil { - return nil, err - } - return h, nil } -func (h *HugoSites) initGitInfo() error { +func (h *HugoSites) loadGitInfo() error { if h.Cfg.GetBool("enableGitInfo") { gi, err := newGitInfo(h.Cfg) if err != nil { @@ -247,16 +364,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error { d.Site = &s.Info - siteConfig, err := loadSiteConfig(s.Language) + siteConfig, err := loadSiteConfig(s.language) if err != nil { return err } - s.siteConfig = siteConfig - s.siteRefLinker, err = newSiteRefLinker(s.Language, s) + s.siteConfigConfig = siteConfig + s.siteRefLinker, err = newSiteRefLinker(s.language, s) return err } - cfg.Language = s.Language + cfg.Language = s.language cfg.MediaTypes = s.mediaTypesConfig cfg.OutputFormats = s.outputFormatsConfig @@ -347,11 +464,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) { return sites, nil } -// Reset resets the sites and template caches, 
making it ready for a full rebuild. -func (h *HugoSites) reset() { - for i, s := range h.Sites { - h.Sites[i] = s.reset() +// Reset resets the sites and template caches etc., making it ready for a full rebuild. +func (h *HugoSites) reset(config *BuildCfg) { + if config.ResetState { + for i, s := range h.Sites { + h.Sites[i] = s.reset() + if r, ok := s.Fs.Destination.(hugofs.Reseter); ok { + r.Reset() + } + } } + + h.fatalErrorHandler = &fatalErrorHandler{ + h: h, + donec: make(chan bool), + } + + h.init.Reset() } // resetLogs resets the log counters etc. Used to do a new build on the same sites. @@ -387,7 +516,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error { h.Sites = sites for _, s := range sites { - s.owner = h + s.h = h } if err := applyDeps(depsCfg, sites...); err != nil { @@ -435,7 +564,10 @@ type BuildCfg struct { // Note that a page does not have to have a content page / file. // For regular builds, this will allways return true. // TODO(bep) rename/work this. -func (cfg *BuildCfg) shouldRender(p *Page) bool { +func (cfg *BuildCfg) shouldRender(p *pageState) bool { + if !p.render { + return false + } if p.forceRender { p.forceRender = false return true @@ -445,15 +577,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool { return true } - if cfg.RecentlyVisited[p.RelPermalink()] { - if cfg.PartialReRender { - _ = p.initMainOutputFormat() - } - return true - } - - if cfg.whatChanged != nil && p.File != nil { - return cfg.whatChanged.files[p.File.Filename()] + if cfg.whatChanged != nil && p.File() != nil { + return cfg.whatChanged.files[p.File().Filename()] } return false @@ -477,100 +602,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error { return nil } - // TODO(bep) DRY - sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap")) - s := h.Sites[0] smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"} return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", - sitemapDefault.Filename, h.toSiteInfos(), smLayouts...) -} - -func (h *HugoSites) assignMissingTranslations() error { - - // This looks heavy, but it should be a small number of nodes by now. - allPages := h.findAllPagesByKindNotIn(KindPage) - for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { - nodes := h.findPagesByKindIn(nodeType, allPages) - - // Assign translations - for _, t1 := range nodes { - for _, t2 := range nodes { - if t1.isNewTranslation(t2) { - t1.translations = append(t1.translations, t2) - } - } - } - } - - // Now we can sort the translations. - for _, p := range allPages { - if len(p.translations) > 0 { - pageBy(languagePageSort).Sort(p.translations) - } - } - return nil - + s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...) } // createMissingPages creates home page, taxonomies etc. that isnt't created as an // effect of having a content file. 
func (h *HugoSites) createMissingPages() error { - var newPages Pages + var newPages pageStatePages for _, s := range h.Sites { - if s.isEnabled(KindHome) { + if s.isEnabled(page.KindHome) { // home pages - home := s.findPagesByKind(KindHome) - if len(home) > 1 { + homes := s.findWorkPagesByKind(page.KindHome) + if len(homes) > 1 { panic("Too many homes") } - if len(home) == 0 { - n := s.newHomePage() - s.Pages = append(s.Pages, n) - newPages = append(newPages, n) + var home *pageState + if len(homes) == 0 { + home = s.newPage(page.KindHome) + s.workAllPages = append(s.workAllPages, home) + newPages = append(newPages, home) + } else { + home = homes[0] } + + s.home = home } // Will create content-less root sections. newSections := s.assembleSections() - s.Pages = append(s.Pages, newSections...) + s.workAllPages = append(s.workAllPages, newSections...) newPages = append(newPages, newSections...) + taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm) + taxonomyEnabled := s.isEnabled(page.KindTaxonomy) + // taxonomy list and terms pages - taxonomies := s.Language.GetStringMapString("taxonomies") + taxonomies := s.Language().GetStringMapString("taxonomies") if len(taxonomies) > 0 { - taxonomyPages := s.findPagesByKind(KindTaxonomy) - taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm) + taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy) + taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm) + + // Make them navigable from WeightedPage etc. + for _, p := range taxonomyPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, p := range taxonomyTermsPages { + p.getTaxonomyNodeInfo().TransferValues(p) + } + for _, plural := range taxonomies { - if s.isEnabled(KindTaxonomyTerm) { + if taxonomyTermEnabled { foundTaxonomyTermsPage := false for _, p := range taxonomyTermsPages { - if p.sectionsPath() == plural { + if p.SectionsPath() == plural { foundTaxonomyTermsPage = true break } } if !foundTaxonomyTermsPage { - n := s.newTaxonomyTermsPage(plural) - s.Pages = append(s.Pages, n) + n := s.newPage(page.KindTaxonomyTerm, plural) + n.getTaxonomyNodeInfo().TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } - if s.isEnabled(KindTaxonomy) { - for key := range s.Taxonomies[plural] { + if taxonomyEnabled { + for termKey := range s.Taxonomies[plural] { + foundTaxonomyPage := false - origKey := key - if s.Info.preserveTaxonomyNames { - key = s.PathSpec.MakePathSanitized(key) - } for _, p := range taxonomyPages { - sectionsPath := p.sectionsPath() + sectionsPath := p.SectionsPath() if !strings.HasPrefix(sectionsPath, plural) { continue @@ -579,20 +689,21 @@ func (h *HugoSites) createMissingPages() error { singularKey := strings.TrimPrefix(sectionsPath, plural) singularKey = strings.TrimPrefix(singularKey, "/") - // Some people may have /authors/MaxMustermann etc. as paths. - // p.sections contains the raw values from the file system. 
- // See https://github.com/gohugoio/hugo/issues/4238 - singularKey = s.PathSpec.MakePathSanitized(singularKey) - - if singularKey == key { + if singularKey == termKey { foundTaxonomyPage = true break } } if !foundTaxonomyPage { - n := s.newTaxonomyPage(plural, origKey) - s.Pages = append(s.Pages, n) + info := s.taxonomyNodes.Get(plural, termKey) + if info == nil { + panic("no info found") + } + + n := s.newTaxonomyPage(info.term, info.plural, info.termKey) + info.TransferValues(n) + s.workAllPages = append(s.workAllPages, n) newPages = append(newPages, n) } } @@ -601,24 +712,6 @@ func (h *HugoSites) createMissingPages() error { } } - if len(newPages) > 0 { - // This resorting is unfortunate, but it also needs to be sorted - // when sections are created. - first := h.Sites[0] - - first.AllPages = append(first.AllPages, newPages...) - - first.AllPages.sort() - - for _, s := range h.Sites { - s.Pages.sort() - } - - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].AllPages = first.AllPages - } - } - return nil } @@ -628,61 +721,58 @@ func (h *HugoSites) removePageByFilename(filename string) { } } -func (h *HugoSites) setupTranslations() { +func (h *HugoSites) createPageCollections() error { for _, s := range h.Sites { for _, p := range s.rawAllPages { - if p.Kind == kindUnknown { - p.Kind = p.kindFromSections() - } - - if !p.s.isEnabled(p.Kind) { + if !s.isEnabled(p.Kind()) { continue } - shouldBuild := p.shouldBuild() - s.updateBuildStats(p) + shouldBuild := s.shouldBuild(p) + s.buildStats.update(p) if shouldBuild { - if p.headless { + if p.m.headless { s.headlessPages = append(s.headlessPages, p) } else { - s.Pages = append(s.Pages, p) + s.workAllPages = append(s.workAllPages, p) } } } } - allPages := make(Pages, 0) + allPages := newLazyPagesFactory(func() page.Pages { + var pages page.Pages + for _, s := range h.Sites { + pages = append(pages, s.Pages()...) + } - for _, s := range h.Sites { - allPages = append(allPages, s.Pages...) - } + page.SortByDefault(pages) - allPages.sort() + return pages + }) - for _, s := range h.Sites { - s.AllPages = allPages - } + allRegularPages := newLazyPagesFactory(func() page.Pages { + return h.findPagesByKindIn(page.KindPage, allPages.get()) + }) - // Pull over the collections from the master site - for i := 1; i < len(h.Sites); i++ { - h.Sites[i].Data = h.Sites[0].Data + for _, s := range h.Sites { + s.PageCollections.allPages = allPages + s.PageCollections.allRegularPages = allRegularPages } - if len(h.Sites) > 1 { - allTranslations := pagesToTranslationsMap(allPages) - assignTranslationsToPages(allTranslations, allPages) - } + return nil } -func (s *Site) preparePagesForRender(start bool) error { - for _, p := range s.Pages { - if err := p.prepareForRender(start); err != nil { +func (s *Site) preparePagesForRender(idx int) error { + + for _, p := range s.workAllPages { + if err := p.initOutputFormat(idx); err != nil { return err } } for _, p := range s.headlessPages { - if err := p.prepareForRender(start); err != nil { + if err := p.initOutputFormat(idx); err != nil { return err } } @@ -691,62 +781,141 @@ func (s *Site) preparePagesForRender(start bool) error { } // Pages returns all pages for all sites. 
-func (h *HugoSites) Pages() Pages { - return h.Sites[0].AllPages +func (h *HugoSites) Pages() page.Pages { + return h.Sites[0].AllPages() } -func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) { - if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 { - p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName()) - err := p.shortcodeState.executeShortcodesForDelta(p) +func (h *HugoSites) loadData(fs afero.Fs) (err error) { + spec := source.NewSourceSpec(h.PathSpec, fs) + fileSystem := spec.NewFilesystem("") + h.data = make(map[string]interface{}) + for _, r := range fileSystem.Files() { + if err := h.handleDataFile(r); err != nil { + return err + } + } - if err != nil { + return +} + +func (h *HugoSites) handleDataFile(r source.ReadableFile) error { + var current map[string]interface{} - return rawContentCopy, err + f, err := r.Open() + if err != nil { + return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName()) + } + defer f.Close() + + // Crawl in data tree to insert data + current = h.data + keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator) + // The first path element is the virtual folder (typically theme name), which is + // not part of the key. + if len(keyParts) > 1 { + for _, key := range keyParts[1:] { + if key != "" { + if _, ok := current[key]; !ok { + current[key] = make(map[string]interface{}) + } + current = current[key].(map[string]interface{}) + } } + } - rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes) + data, err := h.readData(r) + if err != nil { + return h.errWithFileContext(err, r) + } - if err != nil { - p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error()) + if data == nil { + return nil + } + + // filepath.Walk walks the files in lexical order, '/' comes before '.' + // this warning could happen if + // 1. A theme uses the same key; the main data folder wins + // 2. A sub folder uses the same key: the sub folder wins + higherPrecedentData := current[r.BaseFileName()] + + switch data.(type) { + case nil: + // hear the crickets? 
+ + case map[string]interface{}: + + switch higherPrecedentData.(type) { + case nil: + current[r.BaseFileName()] = data + case map[string]interface{}: + // merge maps: insert entries from data for keys that + // don't already exist in higherPrecedentData + higherPrecedentMap := higherPrecedentData.(map[string]interface{}) + for key, value := range data.(map[string]interface{}) { + if _, exists := higherPrecedentMap[key]; exists { + h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path()) + } else { + higherPrecedentMap[key] = value + } + } + default: + // can't merge: higherPrecedentData is not a map + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) + } + + case []interface{}: + if higherPrecedentData == nil { + current[r.BaseFileName()] = data + } else { + // we don't merge array data + h.Log.WARN.Printf("The %T data from '%s' overridden by "+ + "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData) } + + default: + h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName()) } - return rawContentCopy, nil + return nil } -func (s *Site) updateBuildStats(page *Page) { - if page.IsDraft() { - s.draftCount++ +func (h *HugoSites) errWithFileContext(err error, f source.File) error { + rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo) + if !ok { + return err } - if page.IsFuture() { - s.futureCount++ - } + realFilename := rfi.RealFilename() - if page.IsExpired() { - s.expiredCount++ - } -} + err, _ = herrors.WithFileContextForFile( + err, + realFilename, + realFilename, + h.SourceSpec.Fs.Source, + herrors.SimpleLineMatcher) -func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindNotIn(kind, inPages) + return err } -func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages { - return h.Sites[0].findPagesByKindIn(kind, inPages) -} +func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) { + file, err := f.Open() + if err != nil { + return nil, errors.Wrap(err, "readData: failed to open data file") + } + defer file.Close() + content := helpers.ReaderToBytes(file) -func (h *HugoSites) findAllPagesByKind(kind string) Pages { - return h.findPagesByKindIn(kind, h.Sites[0].AllPages) + format := metadecoders.FormatFromString(f.Extension()) + return metadecoders.Default.Unmarshal(content, format) } -func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages { - return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages) +func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages { + return h.Sites[0].findPagesByKindIn(kind, inPages) } -func (h *HugoSites) findPagesByShortcode(shortcode string) Pages { - var pages Pages +func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages { + var pages page.Pages for _, s := range h.Sites { pages = append(pages, s.findPagesByShortcode(shortcode)...) } diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index ec5070fa8..214f72c5f 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -1,4 +1,4 @@ -// Copyright 2016-present The Hugo Authors. All rights reserved. +// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -15,7 +15,12 @@ package hugolib import ( "bytes" + "context" "fmt" + "runtime/trace" + "sort" + + "github.com/gohugoio/hugo/output" "errors" @@ -26,6 +31,9 @@ import ( // Build builds all sites. If filesystem events are provided, // this is considered to be a potential partial rebuild. func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { + ctx, task := trace.NewTask(context.Background(), "Build") + defer task.End() + errCollector := h.StartErrorCollector() errs := make(chan error) @@ -71,22 +79,36 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return err } } else { - if err := h.init(conf); err != nil { + if err := h.initSites(conf); err != nil { return err } } - if err := h.process(conf, events...); err != nil { + var err error + + f := func() { + err = h.process(conf, events...) + } + trace.WithRegion(ctx, "process", f) + if err != nil { return err } - if err := h.assemble(conf); err != nil { + f = func() { + err = h.assemble(conf) + } + trace.WithRegion(ctx, "assemble", f) + if err != nil { return err } + return nil } - prepareErr = prepare() + f := func() { + prepareErr = prepare() + } + trace.WithRegion(ctx, "prepare", f) if prepareErr != nil { h.SendError(prepareErr) } @@ -94,7 +116,12 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { } if prepareErr == nil { - if err := h.render(conf); err != nil { + var err error + f := func() { + err = h.render(conf) + } + trace.WithRegion(ctx, "render", f) + if err != nil { h.SendError(err) } } @@ -120,6 +147,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return err } + if err := h.fatalErrorHandler.getErr(); err != nil { + return err + } + errorCount := h.Log.ErrorCounter.Count() if errorCount > 0 { return fmt.Errorf("logged %d error(s)", errorCount) @@ -132,17 +163,8 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { // Build lifecycle methods below. // The order listed matches the order of execution. -func (h *HugoSites) init(config *BuildCfg) error { - - for _, s := range h.Sites { - if s.PageCollections == nil { - s.PageCollections = newPageCollections() - } - } - - if config.ResetState { - h.reset() - } +func (h *HugoSites) initSites(config *BuildCfg) error { + h.reset(config) if config.NewConfig != nil { if err := h.createSitesFromConfig(config.NewConfig); err != nil { @@ -155,28 +177,22 @@ func (h *HugoSites) init(config *BuildCfg) error { func (h *HugoSites) initRebuild(config *BuildCfg) error { if config.NewConfig != nil { - return errors.New("Rebuild does not support 'NewConfig'.") + return errors.New("rebuild does not support 'NewConfig'") } if config.ResetState { - return errors.New("Rebuild does not support 'ResetState'.") + return errors.New("rebuild does not support 'ResetState'") } if !h.running { - return errors.New("Rebuild called when not in watch mode") - } - - if config.whatChanged.source { - // This is for the non-renderable content pages (rarely used, I guess). - // We could maybe detect if this is really needed, but it should be - // pretty fast. 
- h.TemplateHandler().RebuildClone() + return errors.New("rebuild called when not in watch mode") } for _, s := range h.Sites { s.resetBuildState() } + h.reset(config) h.resetLogs() helpers.InitLoggers() @@ -203,14 +219,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error { } func (h *HugoSites) assemble(config *BuildCfg) error { - if config.whatChanged.source { - for _, s := range h.Sites { - s.createTaxonomiesEntries() - } - } - - // TODO(bep) we could probably wait and do this in one go later - h.setupTranslations() if len(h.Sites) > 1 { // The first is initialized during process; initialize the rest @@ -221,47 +229,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } } + if err := h.createPageCollections(); err != nil { + return err + } + if config.whatChanged.source { for _, s := range h.Sites { - if err := s.buildSiteMeta(); err != nil { + if err := s.assembleTaxonomies(); err != nil { return err } } } + // Create pagexs for the section pages etc. without content file. if err := h.createMissingPages(); err != nil { return err } for _, s := range h.Sites { - for _, pages := range []Pages{s.Pages, s.headlessPages} { - for _, p := range pages { - // May have been set in front matter - if len(p.outputFormats) == 0 { - p.outputFormats = s.outputFormats[p.Kind] - } - - if p.headless { - // headless = 1 output format only - p.outputFormats = p.outputFormats[:1] - } - for _, r := range p.Resources.ByType(pageResourceType) { - r.(*Page).outputFormats = p.outputFormats - } - - if err := p.initPaths(); err != nil { - return err - } - - } - } - s.assembleMenus() - s.refreshPageCaches() s.setupSitePages() - } - - if err := h.assignMissingTranslations(); err != nil { - return err + sort.Stable(s.workAllPages) } return nil @@ -269,42 +256,60 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } func (h *HugoSites) render(config *BuildCfg) error { + siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost} + if !config.PartialReRender { + h.renderFormats = output.Formats{} for _, s := range h.Sites { s.initRenderFormats() + h.renderFormats = append(h.renderFormats, s.renderFormats...) } } + i := 0 for _, s := range h.Sites { - for i, rf := range s.renderFormats { - for _, s2 := range h.Sites { - // We render site by site, but since the content is lazily rendered - // and a site can "borrow" content from other sites, every site - // needs this set. - s2.rc = &siteRenderingContext{Format: rf} - - isRenderingSite := s == s2 - - if !config.PartialReRender { - if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil { - return err + for siteOutIdx, renderFormat := range s.renderFormats { + siteRenderContext.outIdx = siteOutIdx + siteRenderContext.sitesOutIdx = i + i++ + + select { + case <-h.Done(): + return nil + default: + // For the non-renderable pages, we use the content iself as + // template and we may have to re-parse and execute it for + // each output format. + h.TemplateHandler().RebuildClone() + + for _, s2 := range h.Sites { + // We render site by site, but since the content is lazily rendered + // and a site can "borrow" content from other sites, every site + // needs this set. 
+ s2.rc = &siteRenderingContext{Format: renderFormat} + + if !config.PartialReRender { + if err := s2.preparePagesForRender(siteRenderContext.sitesOutIdx); err != nil { + return err + } } } - } - - if !config.SkipRender { - if config.PartialReRender { - if err := s.renderPages(config); err != nil { - return err - } - } else { - if err := s.render(config, i); err != nil { - return err + if !config.SkipRender { + if config.PartialReRender { + if err := s.renderPages(siteRenderContext); err != nil { + return err + } + } else { + if err := s.render(siteRenderContext); err != nil { + return err + } } } } + } + } if !config.SkipRender { diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go index dd80946e8..6fe4901a1 100644 --- a/hugolib/hugo_sites_build_errors_test.go +++ b/hugolib/hugo_sites_build_errors_test.go @@ -7,6 +7,9 @@ import ( "runtime" "strings" "testing" + "time" + + "github.com/fortytw2/leaktest" "github.com/gohugoio/hugo/common/herrors" "github.com/stretchr/testify/require" @@ -20,25 +23,24 @@ type testSiteBuildErrorAsserter struct { func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext { t.assert.NotNil(err, t.name) ferr := herrors.UnwrapErrorWithFileContext(err) - t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, trace())) + t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, stackTrace())) return ferr } func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) { fe := t.getFileError(err) - t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, trace())) + t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, stackTrace())) } func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) { // The error message will contain filenames with OS slashes. Normalize before compare. 
e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2) - t.assert.Contains(e2, e1, trace()) + t.assert.Contains(e2, e1, stackTrace()) } func TestSiteBuildErrors(t *testing.T) { t.Parallel() - assert := require.New(t) const ( yamlcontent = "yamlcontent" @@ -88,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertCreateError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(1, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(1, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error()) }, @@ -103,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -118,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(5, fe.Position().LineNumber) - assert.Equal(14, fe.Position().ColumnNumber) - assert.Equal("go-html-template", fe.ChromaLexer) + a.assert.Equal(5, fe.Position().LineNumber) + a.assert.Equal(14, fe.Position().ColumnNumber) + a.assert.Equal("go-html-template", fe.ChromaLexer) a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error()) }, @@ -143,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) { }, assertBuildError: func(a testSiteBuildErrorAsserter, err error) { fe := a.getFileError(err) - assert.Equal(7, fe.Position().LineNumber) - assert.Equal("md", fe.ChromaLexer) + a.assert.Equal(7, fe.Position().LineNumber) + a.assert.Equal("md", fe.ChromaLexer) // Make sure that it contains both the content file and template a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error()) a.assertEr