 commands/server.go          |   10 +-
 helpers/pygments.go         |   40 +-
 hugolib/config.go           |    2 +-
 hugolib/indexing_test.go    |   22 +-
 hugolib/node.go             |   32 +-
 hugolib/page.go             |  980 +-
 hugolib/pageSort.go         |   70 +-
 hugolib/page_test.go        |  538 +-
 hugolib/shortcode.go        |  338 +-
 hugolib/site.go             | 1006 +-
 hugolib/site_test.go        |  740 +-
 template/bundle/embedded.go |   12 +-
 template/bundle/template.go |  376 +-
 watcher/batcher.go          |  112 +-
 14 files changed, 2139 insertions(+), 2139 deletions(-)
diff --git a/commands/server.go b/commands/server.go
index 5ae518eef..874703b9b 100644
--- a/commands/server.go
+++ b/commands/server.go
@@ -52,11 +52,11 @@ func server(cmd *cobra.Command, args []string) {
         BaseUrl = "http://" + BaseUrl
     }
 
-	if serverAppend {
-		Config.BaseUrl = strings.TrimSuffix(BaseUrl, "/") + ":" + strconv.Itoa(serverPort)
-	} else {
-		Config.BaseUrl = strings.TrimSuffix(BaseUrl, "/")
-	}
+    if serverAppend {
+        Config.BaseUrl = strings.TrimSuffix(BaseUrl, "/") + ":" + strconv.Itoa(serverPort)
+    } else {
+        Config.BaseUrl = strings.TrimSuffix(BaseUrl, "/")
+    }
 
     build(serverWatch)
diff --git a/helpers/pygments.go b/helpers/pygments.go
index 7d22faf7d..28b296227 100644
--- a/helpers/pygments.go
+++ b/helpers/pygments.go
@@ -14,32 +14,32 @@
 package helpers
 
 import (
-	"bytes"
-	"log"
-	"os/exec"
-	"strings"
+    "bytes"
+    "log"
+    "os/exec"
+    "strings"
 )
 
 func Highlight(code string, lexer string) string {
-	var pygmentsBin = "pygmentize"
+    var pygmentsBin = "pygmentize"
 
-	if _, err := exec.LookPath(pygmentsBin); err != nil {
-		log.Print("Highlighting requires Pygments to be installed and in the path")
-		return code
-	}
+    if _, err := exec.LookPath(pygmentsBin); err != nil {
+        log.Print("Highlighting requires Pygments to be installed and in the path")
+        return code
+    }
 
-	var out bytes.Buffer
-	var stderr bytes.Buffer
+    var out bytes.Buffer
+    var stderr bytes.Buffer
 
-	cmd := exec.Command(pygmentsBin, "-l"+lexer, "-fhtml", "-O style=monokai,noclasses=true,encoding=utf-8")
-	cmd.Stdin = strings.NewReader(code)
-	cmd.Stdout = &out
-	cmd.Stderr = &stderr
+    cmd := exec.Command(pygmentsBin, "-l"+lexer, "-fhtml", "-O style=monokai,noclasses=true,encoding=utf-8")
+    cmd.Stdin = strings.NewReader(code)
+    cmd.Stdout = &out
+    cmd.Stderr = &stderr
 
-	if err := cmd.Run(); err != nil {
-		log.Print(stderr.String())
-		return code
-	}
+    if err := cmd.Run(); err != nil {
+        log.Print(stderr.String())
+        return code
+    }
 
-	return out.String()
+    return out.String()
 }
diff --git a/hugolib/config.go b/hugolib/config.go
index f3e1d3ec9..df1b87ec6 100644
--- a/hugolib/config.go
+++ b/hugolib/config.go
@@ -36,7 +36,7 @@ type Config struct {
     Params                         map[string]interface{}
     Permalinks                     PermalinkOverrides
     BuildDrafts, UglyUrls, Verbose bool
-	CanonifyUrls                   bool
+    CanonifyUrls                   bool
 }
 
 var c Config
diff --git a/hugolib/indexing_test.go b/hugolib/indexing_test.go
index aab34e464..ad828de3e 100644
--- a/hugolib/indexing_test.go
+++ b/hugolib/indexing_test.go
@@ -1,18 +1,18 @@
 package hugolib
 
 import (
-	"strings"
-	"testing"
+    "strings"
+    "testing"
 )
 
 func TestSitePossibleIndexes(t *testing.T) {
-	site := new(Site)
-	page, _ := ReadFrom(strings.NewReader(PAGE_YAML_WITH_INDEXES_A), "path/to/page")
-	site.Pages = append(site.Pages, page)
-	indexes := site.possibleIndexes()
-	if !compareStringSlice(indexes, []string{"tags", "categories"}) {
-		if !compareStringSlice(indexes, []string{"categories", "tags"}) {
-			t.Fatalf("possible indexes do not match [tags categories]. Got: %s", indexes)
-		}
-	}
+    site := new(Site)
+    page, _ := ReadFrom(strings.NewReader(PAGE_YAML_WITH_INDEXES_A), "path/to/page")
+    site.Pages = append(site.Pages, page)
+    indexes := site.possibleIndexes()
+    if !compareStringSlice(indexes, []string{"tags", "categories"}) {
+        if !compareStringSlice(indexes, []string{"categories", "tags"}) {
+            t.Fatalf("possible indexes do not match [tags categories]. Got: %s", indexes)
+        }
+    }
 }
diff --git a/hugolib/node.go b/hugolib/node.go
index 16ecdd530..44328ff19 100644
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -14,29 +14,29 @@
 package hugolib
 
 import (
-	"html/template"
-	"time"
+    "html/template"
+    "time"
 )
 
 type Node struct {
-	RSSLink template.HTML
-	Site    SiteInfo
-	// layout string
-	Data        map[string]interface{}
-	Title       string
-	Description string
-	Keywords    []string
-	Date        time.Time
-	UrlPath
+    RSSLink template.HTML
+    Site    SiteInfo
+    // layout string
+    Data        map[string]interface{}
+    Title       string
+    Description string
+    Keywords    []string
+    Date        time.Time
+    UrlPath
 }
 
 func (n Node) RSSlink() template.HTML {
-	return n.RSSLink
+    return n.RSSLink
 }
 
 type UrlPath struct {
-	Url       string
-	Permalink template.HTML
-	Slug      string
-	Section   string
+    Url       string
+    Permalink template.HTML
+    Slug      string
+    Section   string
 }
diff --git a/hugolib/page.go b/hugolib/page.go
index 3cb831f86..dedbfa7fb 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -14,667 +14,667 @@
 package hugolib
 
 import (
-	"bytes"
-	"errors"
-	"fmt"
-	"github.com/BurntSushi/toml"
-	"github.com/spf13/hugo/helpers"
-	"github.com/spf13/hugo/parser"
-	"github.com/spf13/hugo/template/bundle"
-	"github.com/theplant/blackfriday"
-	"html/template"
-	"io"
-	"launchpad.net/goyaml"
-	json "launchpad.net/rjson"
-	"net/url"
-	"path"
-	"strings"
-	"time"
+    "bytes"
+    "errors"
+    "fmt"
+    "github.com/BurntSushi/toml"
+    "github.com/spf13/hugo/helpers"
+    "github.com/spf13/hugo/parser"
+    "github.com/spf13/hugo/template/bundle"
+    "github.com/theplant/blackfriday"
+    "html/template"
+    "io"
+    "launchpad.net/goyaml"
+    json "launchpad.net/rjson"
+    "net/url"
+    "path"
+    "strings"
+    "time"
 )
 
 type Page struct {
-	Status          string
-	Images          []string
-	rawContent      []byte
-	Content         template.HTML
-	Summary         template.HTML
-	TableOfContents template.HTML
-	Truncated       bool
-	plain           string // TODO should be []byte
-	Params          map[string]interface{}
-	contentType     string
-	Draft           bool
-	Aliases         []string
-	Tmpl            bundle.Template
-	Markup          string
-	renderable      bool
-	layout          string
-	linkTitle       string
-	PageMeta
-	File
-	Position
-	Node
+    Status          string
+    Images          []string
+    rawContent      []byte
+    Content         template.HTML
+    Summary         template.HTML
+    TableOfContents template.HTML
+    Truncated       bool
+    plain           string // TODO should be []byte
+    Params          map[string]interface{}
+    contentType     string
+    Draft           bool
+    Aliases         []string
+    Tmpl            bundle.Template
+    Markup          string
+    renderable      bool
+    layout          string
+    linkTitle       string
+    PageMeta
+    File
+    Position
+    Node
 }
 
 type File struct {
-	FileName, Extension, Dir string
+    FileName, Extension, Dir string
 }
 
 type PageMeta struct {
-	WordCount      int
-	FuzzyWordCount int
-	ReadingTime    int
-	Weight         int
+    WordCount      int
+    FuzzyWordCount int
+    ReadingTime    int
+    Weight         int
 }
 
 type Position struct {
-	Prev *Page
-	Next *Page
+    Prev *Page
+    Next *Page
 }
 
 type Pages []*Page
 
 func (p *Page) Plain() string {
-	if len(p.plain) == 0 {
-		p.plain = StripHTML(StripShortcodes(string(p.renderBytes(p.rawContent))))
-	}
-	return p.plain
+    if len(p.plain) == 0 {
+        p.plain = StripHTML(StripShortcodes(string(p.renderBytes(p.rawContent))))
+    }
+    return p.plain
 }
 
 func (p *Page) setSummary() {
-	if bytes.Contains(p.rawContent, summaryDivider) {
-		// If user defines split:
-		// Split then render
-		p.Truncated = true // by definition
-		header := bytes.Split(p.rawContent, summaryDivider)[0]
-		p.Summary = bytesToHTML(p.renderBytes(header))
-	} else {
-		// If hugo defines split:
-		// render, strip html, then split
-		plain := strings.TrimSpace(p.Plain())
-		p.Summary = bytesToHTML([]byte(TruncateWordsToWholeSentence(plain, summaryLength)))
-		p.Truncated = len(p.Summary) != len(plain)
-	}
+    if bytes.Contains(p.rawContent, summaryDivider) {
+        // If user defines split:
+        // Split then render
+        p.Truncated = true // by definition
+        header := bytes.Split(p.rawContent, summaryDivider)[0]
+        p.Summary = bytesToHTML(p.renderBytes(header))
+    } else {
+        // If hugo defines split:
+        // render, strip html, then split
+        plain := strings.TrimSpace(p.Plain())
+        p.Summary = bytesToHTML([]byte(TruncateWordsToWholeSentence(plain, summaryLength)))
+        p.Truncated = len(p.Summary) != len(plain)
+    }
 }
 
 func stripEmptyNav(in []byte) []byte {
-	return bytes.Replace(in, []byte("<nav>\n</nav>\n\n"), []byte(``), -1)
+    return bytes.Replace(in, []byte("<nav>\n</nav>\n\n"), []byte(``), -1)
 }
 
 func bytesToHTML(b []byte) template.HTML {
-	return template.HTML(string(b))
+    return template.HTML(string(b))
 }
 
 func (p *Page) renderBytes(content []byte) []byte {
-	return renderBytes(content, p.guessMarkupType())
+    return renderBytes(content, p.guessMarkupType())
 }
 
 func (p *Page) renderContent(content []byte) []byte {
-	return renderBytesWithTOC(content, p.guessMarkupType())
+    return renderBytesWithTOC(content, p.guessMarkupType())
 }
 
 func renderBytesWithTOC(content []byte, pagefmt string) []byte {
-	switch pagefmt {
-	default:
-		return markdownRenderWithTOC(content)
-	case "markdown":
-		return markdownRenderWithTOC(content)
-	case "rst":
-		return []byte(getRstContent(content))
-	}
+    switch pagefmt {
+    default:
+        return markdownRenderWithTOC(content)
+    case "markdown":
+        return markdownRenderWithTOC(content)
+    case "rst":
+        return []byte(getRstContent(content))
+    }
 }
 
 func renderBytes(content []byte, pagefmt string) []byte {
-	switch pagefmt {
-	default:
-		return markdownRender(content)
-	case "markdown":
-		return markdownRender(content)
-	case "rst":
-		return []byte(getRstContent(content))
-	}
+    switch pagefmt {
+    default:
+        return markdownRender(content)
+    case "markdown":
+        return markdownRender(content)
+    case "rst":
+        return []byte(getRstContent(content))
+    }
 }
 
 // TODO abstract further to support loading from more
 // than just files on disk. Should load reader (file, []byte)
 func newPage(filename string) *Page {
-	page := Page{contentType: "",
-		File: File{FileName: filename, Extension: "html"},
-		Node: Node{Keywords: make([]string, 10, 30)},
-		Params: make(map[string]interface{})}
-	page.Date, _ = time.Parse("20060102", "20080101")
-	page.guessSection()
-	return &page
+    page := Page{contentType: "",
+        File: File{FileName: filename, Extension: "html"},
+        Node: Node{Keywords: make([]string, 10, 30)},
+        Params: make(map[string]interface{})}
+    page.Date, _ = time.Parse("20060102", "20080101")
+    page.guessSection()
+    return &page
 }
 
 func StripHTML(s string) string {
-	output := ""
-
-	// Shortcut strings with no tags in them
-	if !strings.ContainsAny(s, "<>") {
-		output = s
-	} else {
-		s = strings.Replace(s, "\n", " ", -1)
-		s = strings.Replace(s, "</p>", " \n", -1)
-		s = strings.Replace(s, "<br>", " \n", -1)
-		s = strings.Replace(s, "</br>", " \n", -1)
-
-		// Walk through the string removing all tags
-		b := new(bytes.Buffer)
-		inTag := false
-		for _, r := range s {
-			switch r {
-			case '<':
-				inTag = true
-			case '>':
-				inTag = false
-			default:
-				if !inTag {
-					b.WriteRune(r)
-				}
-			}
-		}
-		output = b.String()
-	}
-	return output
+    output := ""
+
+    // Shortcut strings with no tags in them
+    if !strings.ContainsAny(s, "<>") {
+        output = s
+    } else {
+        s = strings.Replace(s, "\n", " ", -1)
+        s = strings.Replace(s, "</p>", " \n", -1)
+        s = strings.Replace(s, "<br>", " \n", -1)
+        s = strings.Replace(s, "</br>", " \n", -1)
+
+        // Walk through the string removing all tags
+        b := new(bytes.Buffer)
+        inTag := false
+        for _, r := range s {
+            switch r {
+            case '<':
+                inTag = true
+            case '>':
+                inTag = false
+            default:
+                if !inTag {
+                    b.WriteRune(r)
+                }
+            }
+        }
+        output = b.String()
+    }
+    return output
 }
 
 func (p *Page) IsRenderable() bool {
-	return p.renderable
+    return p.renderable
 }
 
 func (p *Page) guessSection() {
-	if p.Section == "" {
-		x := strings.Split(p.FileName, "/")
-		x = x[:len(x)-1]
-		if len(x) == 0 {
-			return
-		}
-		if x[0] == "content" {
-			x = x[1:]
-		}
-		p.Section = path.Join(x...)
-	}
+    if p.Section == "" {
+        x := strings.Split(p.FileName, "/")
+        x = x[:len(x)-1]
+        if len(x) == 0 {
+            return
+        }
+        if x[0] == "content" {
+            x = x[1:]
+        }
+        p.Section = path.Join(x...)
+    }
 }
 
 func (page *Page) Type() string {
-	if page.contentType != "" {
-		return page.contentType
-	}
-	page.guessSection()
-	if x := page.Section; x != "" {
-		return x
-	}
+    if page.contentType != "" {
+        return page.contentType
+    }
+    page.guessSection()
+    if x := page.Section; x != "" {
+        return x
+    }
 
-	return "page"
+    return "page"
 }
 
 func (page *Page) Layout(l ...string) []string {
-	if page.layout != "" {
-		return layouts(page.Type(), page.layout)
-	}
+    if page.layout != "" {
+        return layouts(page.Type(), page.layout)
+    }
 
-	layout := ""
-	if len(l) == 0 {
-		layout = "single"
-	} else {
-		layout = l[0]
-	}
+    layout := ""
+    if len(l) == 0 {
+        layout = "single"
+    } else {
+        layout = l[0]
+    }
 
-	return layouts(page.Type(), layout)
+    return layouts(page.Type(), layout)
 }
 
 func layouts(types string, layout string) (layouts []string) {
-	t := strings.Split(types, "/")
-	for i := range t {
-		search := t[:len(t)-i]
-		layouts = append(layouts, fmt.Sprintf("%s/%s.html", strings.ToLower(path.Join(search...)), layout))
-	}
-	layouts = append(layouts, fmt.Sprintf("%s.html", layout))
-	return
+    t := strings.Split(types, "/")
+    for i := range t {
+        search := t[:len(t)-i]
+        layouts = append(layouts, fmt.Sprintf("%s/%s.html", strings.ToLower(path.Join(search...)), layout))
+    }
+    layouts = append(layouts, fmt.Sprintf("%s.html", layout))
+    return
 }
 
 func ReadFrom(buf io.Reader, name string) (page *Page, err error) {
-	if len(name) == 0 {
-		return nil, errors.New("Zero length page name")
-	}
+    if len(name) == 0 {
+        return nil, errors.New("Zero length page name")
+    }
 
-	// Create new page
-	p := newPage(name)
+    // Create new page
+    p := newPage(name)
 
-	// Parse for metadata & body
-	if err = p.parse(buf); err != nil {
-		return
-	}
+    // Parse for metadata & body
+    if err = p.parse(buf); err != nil {
+        return
+    }
 
-	//analyze for raw stats
-	p.analyzePage()
+    //analyze for raw stats
+    p.analyzePage()
 
-	return p, nil
+    return p, nil
 }
 
 func (p *Page) analyzePage() {
-	p.WordCount = TotalWords(p.Plain())
-	p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
-	p.ReadingTime = int((p.WordCount + 212) / 213)
+    p.WordCount = TotalWords(p.Plain())
+    p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
+    p.ReadingTime = int((p.WordCount + 212) / 213)
}
 
 func (p *Page) permalink() (*url.URL, error) {
-	baseUrl := string(p.Site.BaseUrl)
-	dir := strings.TrimSpace(p.Dir)
-	pSlug := strings.TrimSpace(p.Slug)
-	pUrl := strings.TrimSpace(p.Url)
-	var permalink string
-	var err error
-
-	if override, ok := p.Site.Permalinks[p.Section]; ok {
-		permalink, err = override.Expand(p)
-		if err != nil {
-			return nil, err
-		}
-		//fmt.Printf("have an override for %q in section %s → %s\n", p.Title, p.Section, permalink)
-	} else {
-
-		if len(pSlug) > 0 {
-			if p.Site.Config != nil && p.Site.Config.UglyUrls {
-				permalink = path.Join(dir, p.Slug, p.Extension)
-			} else {
-				permalink = path.Join(dir, p.Slug) + "/"
-			}
-		} else if len(pUrl) > 2 {
-			permalink = pUrl
-		} else {
-			_, t := path.Split(p.FileName)
-			if p.Site.Config != nil && p.Site.Config.UglyUrls {
-				x := replaceExtension(strings.TrimSpace(t), p.Extension)
-				permalink = path.Join(dir, x)
-			} else {
-				file, _ := fileExt(strings.TrimSpace(t))
-				permalink = path.Join(dir, file)
-			}
-		}
-
-	}
-
-	base, err := url.Parse(baseUrl)
-	if err != nil {
-		return nil, err
-	}
-
-	path, err := url.Parse(permalink)
-	if err != nil {
-		return nil, err
-	}
-
-	return MakePermalink(base, path), nil
+    baseUrl := string(p.Site.BaseUrl)
+    dir := strings.TrimSpace(p.Dir)
+    pSlug := strings.TrimSpace(p.Slug)
+    pUrl := strings.TrimSpace(p.Url)
+    var permalink string
+    var err error
+
+    if override, ok := p.Site.Permalinks[p.Section]; ok {
+        permalink, err = override.Expand(p)
+        if err != nil {
+            return nil, err
+        }
+        //fmt.Printf("have an override for %q in section %s → %s\n", p.Title, p.Section, permalink)
+    } else {
+
+        if len(pSlug) > 0 {
+            if p.Site.Config != nil && p.Site.Config.UglyUrls {
+                permalink = path.Join(dir, p.Slug, p.Extension)
+            } else {
+                permalink = path.Join(dir, p.Slug) + "/"
+            }
+        } else if len(pUrl) > 2 {
+            permalink = pUrl
+        } else {
+            _, t := path.Split(p.FileName)
+            if p.Site.Config != nil && p.Site.Config.UglyUrls {
+                x := replaceExtension(strings.TrimSpace(t), p.Extension)
+                permalink = path.Join(dir, x)
+            } else {
+                file, _ := fileExt(strings.TrimSpace(t))
+                permalink = path.Join(dir, file)
+            }
+        }
+
+    }
+
+    base, err := url.Parse(baseUrl)
+    if err != nil {
+        return nil, err
+    }
+
+    path, err := url.Parse(permalink)
+    if err != nil {
+        return nil, err
+    }
+
+    return MakePermalink(base, path), nil
 }
 
 func (p *Page) LinkTitle() string {
-	if len(p.linkTitle) > 0 {
-		return p.linkTitle
-	} else {
-		return p.Title
-	}
+    if len(p.linkTitle) > 0 {
+        return p.linkTitle
+    } else {
+        return p.Title
+    }
 }
 
 func (p *Page) Permalink() (string, error) {
-	link, err := p.permalink()
-	if err != nil {
-		return "", err
-	}
-	return link.String(), nil
+    link, err := p.permalink()
+    if err != nil {
+        return "", err
+    }
+    return link.String(), nil
 }
 
 func (p *Page) RelPermalink() (string, error) {
-	link, err := p.permalink()
-	if err != nil {
-		return "", err
-	}
+    link, err := p.permalink()
+    if err != nil {
+        return "", err
+    }
 
-	link.Scheme = ""
-	link.Host = ""
-	link.User = nil
-	link.Opaque = ""
-	return link.String(), nil
+    link.Scheme = ""
+    link.Host = ""
+    link.User = nil
+    link.Opaque = ""
+    return link.String(), nil
 }
 
 func (page *Page) handleTomlMetaData(datum []byte) (interface{}, error) {
-	m := map[string]interface{}{}
-	datum = removeTomlIdentifier(datum)
-	if _, err := toml.Decode(string(datum), &m); err != nil {
-		return m, fmt.Errorf("Invalid TOML in %s \nError parsing page meta data: %s", page.FileName, err)
-	}
-	return m, nil
+    m := map[string]interface{}{}
+    datum = removeTomlIdentifier(datum)
+    if _, err := toml.Decode(string(datum), &m); err != nil {
+        return m, fmt.Errorf("Invalid TOML in %s \nError parsing page meta data: %s", page.FileName, err)
+    }
+    return m, nil
 }
 
 func removeTomlIdentifier(datum []byte) []byte {
-	return bytes.Replace(datum, []byte("+++"), []byte(""), -1)
+    return bytes.Replace(datum, []byte("+++"), []byte(""), -1)
 }
 
 func (page *Page) handleYamlMetaData(datum []byte) (interface{}, error) {
-	m := map[string]interface{}{}
-	if err := goyaml.Unmarshal(datum, &m); err != nil {
-		return m, fmt.Errorf("Invalid YAML in %s \nError parsing page meta data: %s", page.FileName, err)
-	}
-	return m, nil
+    m := map[string]interface{}{}
+    if err := goyaml.Unmarshal(datum, &m); err != nil {
+        return m, fmt.Errorf("Invalid YAML in %s \nError parsing page meta data: %s", page.FileName, err)
+    }
+    return m, nil
 }
 
 func (page *Page) handleJsonMetaData(datum []byte) (interface{}, error) {
-	var f interface{}
-	if err := json.Unmarshal(datum, &f); err != nil {
-		return f, fmt.Errorf("Invalid JSON in %v \nError parsing page meta data: %s", page.FileName, err)
-	}
-	return f, nil
+    var f interface{}
+    if err := json.Unmarshal(datum, &f); err != nil {
+        return f, fmt.Errorf("Invalid JSON in %v \nError parsing page meta data: %s", page.FileName, err)
+    }
+    return f, nil
 }
 
 func (page *Page) update(f interface{}) error {
-	m := f.(map[string]interface{})
-
-	for k, v := range m {
-		loki := strings.ToLower(k)
-		switch loki {
-		case "title":
-			page.Title = interfaceToString(v)
-		case "linktitle":
-			page.linkTitle = interfaceToString(v)
-		case "description":
-			page.Description = interfaceToString(v)
-		case "slug":
-			page.Slug = helpers.Urlize(interfaceToString(v))
-		case "url":
-			if url := interfaceToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
-				return fmt.Errorf("Only relative urls are supported, %v provided", url)
-			}
-			page.Url = helpers.Urlize(interfaceToString(v))
-		case "type":
-			page.contentType = interfaceToString(v)
-		case "keywords":
-			page.Keywords = interfaceArrayToStringArray(v)
-		case "date", "pubdate":
-			page.Date = interfaceToTime(v)
-		case "draft":
-			page.Draft = interfaceToBool(v)
-		case "layout":
-			page.layout = interfaceToString(v)
-		case "markup":
-			page.Markup = interfaceToString(v)
-		case "weight":
-			page.Weight = interfaceToInt(v)
-		case "aliases":
-			page.Aliases = interfaceArrayToStringArray(v)
-			for _, alias := range page.Aliases {
-				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
-					return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
-				}
-			}
-		case "status":
-			page.Status = interfaceToString(v)
-		default:
-			// If not one of the explicit values, store in Params
-			switch vv := v.(type) {
-			case bool:
-				page.Params[loki] = vv
-			case string:
-				page.Params[loki] = vv
-			case int64, int32, int16, int8, int:
-				page.Params[loki] = vv
-			case float64, float32:
-				page.Params[loki] = vv
-			case time.Time:
-				page.Params[loki] = vv
-			default: // handle array of strings as well
-				switch vvv := vv.(type) {
-				case []interface{}:
-					var a = make([]string, len(vvv))
-					for i, u := range vvv {
-						a[i] = interfaceToString(u)
-					}
-					page.Params[loki] = a
-				}
-			}
-		}
-	}
-	return nil
+    m := f.(map[string]interface{})
+
+    for k, v := range m {
+        loki := strings.ToLower(k)
+        switch loki {
+        case "title":
+            page.Title = interfaceToString(v)
+        case "linktitle":
+            page.linkTitle = interfaceToString(v)
+        case "description":
+            page.Description = interfaceToString(v)
+        case "slug":
+            page.Slug = helpers.Urlize(interfaceToString(v))
+        case "url":
+            if url := interfaceToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
+                return fmt.Errorf("Only relative urls are supported, %v provided", url)
+            }
+            page.Url = helpers.Urlize(interfaceToString(v))
+        case "type":
+            page.contentType = interfaceToString(v)
+        case "keywords":
+            page.Keywords = interfaceArrayToStringArray(v)
+        case "date", "pubdate":
+            page.Date = interfaceToTime(v)
+        case "draft":
+            page.Draft = interfaceToBool(v)
+        case "layout":
+            page.layout = interfaceToString(v)
+        case "markup":
+            page.Markup = interfaceToString(v)
+        case "weight":
+            page.Weight = interfaceToInt(v)
+        case "aliases":
+            page.Aliases = interfaceArrayToStringArray(v)
+            for _, alias := range page.Aliases {
+                if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {