author     Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>   2018-10-18 09:47:39 +0200
committer  Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>   2018-10-22 19:57:44 +0200
commit     1b7ecfc2e176315b69914756c70b46306561e4d1 (patch)
tree       c1b9c74418e700123dff9e382e13fae99f95f43b /parser
parent     27f5a906a2a34e3b8348c8baeea48355352b5bbb (diff)
hugolib: Use []byte in shortcode parsing
See #5324
Diffstat (limited to 'parser')
-rw-r--r--  parser/pageparser/item.go        |  4
-rw-r--r--  parser/pageparser/pagelexer.go   | 11
-rw-r--r--  parser/pageparser/pageparser.go  | 10
3 files changed, 13 insertions, 12 deletions
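
At a glance, the change moves the parser entry points from string input to []byte and relocates them from pagelexer.go to pageparser.go. A before/after sketch of the exported signatures touched here, drawn from the hunks below:

    // Before this commit (parser/pageparser/pagelexer.go):
    //   func Parse(s string) *Tokens
    //   func ParseFrom(s string, from int) *Tokens
    //
    // After this commit (parser/pageparser/pageparser.go):
    //   func Parse(input []byte) *Tokens
    //   func ParseFrom(input []byte, from int) *Tokens
    //
    // New convenience accessor on Item for callers that still need a string:
    //   func (i Item) ValStr() string
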
diff --git a/parser/pageparser/item.go b/parser/pageparser/item.go
index 35bc8e268..6e93bb696 100644
--- a/parser/pageparser/item.go
+++ b/parser/pageparser/item.go
@@ -21,6 +21,10 @@ type Item struct {
 	Val []byte
 }
 
+func (i Item) ValStr() string {
+	return string(i.Val)
+}
+
 func (i Item) IsText() bool {
 	return i.typ == tText
 }
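
A minimal sketch of how a caller might use the new accessor; the variable and its surrounding context are illustrative assumptions, only Item.Val and ValStr() come from this hunk:

    var it pageparser.Item // e.g. the current token while walking shortcode input (assumed)
    raw := it.Val          // the raw value stays a []byte, no copy
    s := it.ValStr()       // allocates a string only where one is actually needed
    _, _ = raw, s
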
diff --git a/parser/pageparser/pagelexer.go b/parser/pageparser/pagelexer.go
index 3bdfb6c33..c15e977ca 100644
--- a/parser/pageparser/pagelexer.go
+++ b/parser/pageparser/pagelexer.go
@@ -60,17 +60,6 @@ type pageLexer struct {
 	items []Item
 }
 
-func Parse(s string) *Tokens {
-	return ParseFrom(s, 0)
-}
-
-func ParseFrom(s string, from int) *Tokens {
-	input := []byte(s)
-	lexer := newPageLexer(input, pos(from), lexMainSection) // TODO(bep) 2errors
-	lexer.run()
-	return &Tokens{lexer: lexer}
-}
-
 // note: the input position here is normally 0 (start), but
 // can be set if position of first shortcode is known
 // TODO(bep) 2errors byte
diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go
index 5534ee64b..948c05edf 100644
--- a/parser/pageparser/pageparser.go
+++ b/parser/pageparser/pageparser.go
@@ -17,7 +17,15 @@
 // See slides here: http://cuddle.googlecode.com/hg/talk/lex.html
 package pageparser
 
-// The lexical scanning below
+func Parse(input []byte) *Tokens {
+	return ParseFrom(input, 0)
+}
+
+func ParseFrom(input []byte, from int) *Tokens {
+	lexer := newPageLexer(input, pos(from), lexMainSection) // TODO(bep) 2errors
+	lexer.run()
+	return &Tokens{lexer: lexer}
+}
 
 type Tokens struct {
 	lexer *pageLexer
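
For context, a hedged sketch of calling the relocated entry points after this change; the input content and the surrounding setup are assumptions, only Parse, ParseFrom, and their []byte signatures come from this diff:

    content := []byte("Markdown with a {{< figure >}} shortcode")   // illustrative input

    tokens := pageparser.Parse(content)   // lex the whole input, starting at offset 0
    _ = pageparser.ParseFrom(content, 0)  // equivalent; a non-zero offset is useful when
                                          // the position of the first shortcode is known
    _ = tokens
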