Pass result in instead of returning

Adam Scarr 2017-08-13 17:30:10 +10:00
parent 0dc37ae5bc
commit 5716ddb5e7
10 changed files with 129 additions and 146 deletions
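The change below rewires every parser to write into a caller-supplied *Result instead of allocating and returning one. As a minimal sketch of the new shape, written as it would sit inside the goparsify package (the exampleParser name is hypothetical and not part of this commit; the State helpers Get, ErrorHere and Advance are the ones visible in the diff):

// Old signature: each parser built and returned its own Result value.
//	type Parser func(*State) Result
// New signature: the caller owns the Result and the parser fills it in,
// returning early and leaving it untouched on failure.
//	type Parser func(*State, *Result)
var exampleParser Parser = func(ps *State, node *Result) {
	if !strings.HasPrefix(ps.Get(), "hello") {
		ps.ErrorHere("hello") // record the error; node stays empty
		return
	}
	ps.Advance(len("hello"))
	node.Token = "hello" // success: write directly into the passed-in node
}

Map callbacks follow the same pattern: they now take a *Result and mutate it (n.Result = ...) rather than returning a fresh Result.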

View File

@@ -12,22 +12,22 @@ var (
 	sumOp = Chars("+-", 1, 1)
 	prodOp = Chars("/*", 1, 1)
-	groupExpr = Seq("(", sum, ")").Map(func(n Result) Result {
-		return Result{Result: n.Child[1].Result}
+	groupExpr = Seq("(", sum, ")").Map(func(n *Result) {
+		n.Result = n.Child[1].Result
 	})
-	number = NumberLit().Map(func(n Result) Result {
+	number = NumberLit().Map(func(n *Result) {
 		switch i := n.Result.(type) {
 		case int64:
-			return Result{Result: float64(i)}
+			n.Result = float64(i)
 		case float64:
-			return Result{Result: i}
+			n.Result = i
 		default:
 			panic(fmt.Errorf("unknown value %#v", i))
 		}
 	})
-	sum = Seq(prod, Some(Seq(sumOp, prod))).Map(func(n Result) Result {
+	sum = Seq(prod, Some(Seq(sumOp, prod))).Map(func(n *Result) {
 		i := n.Child[0].Result.(float64)
 		for _, op := range n.Child[1].Child {
@@ -39,10 +39,10 @@ var (
 			}
 		}
-		return Result{Result: i}
+		n.Result = i
 	})
-	prod = Seq(&value, Some(Seq(prodOp, &value))).Map(func(n Result) Result {
+	prod = Seq(&value, Some(Seq(prodOp, &value))).Map(func(n *Result) {
 		i := n.Child[0].Result.(float64)
 		for _, op := range n.Child[1].Child {
@@ -54,7 +54,7 @@ var (
 			}
 		}
-		return Result{Result: i}
+		n.Result = i
 	})
 	y = Maybe(sum)

View File

@@ -8,52 +8,48 @@ import (
 func Seq(parsers ...Parserish) Parser {
 	parserfied := ParsifyAll(parsers...)
-	return NewParser("Seq()", func(ps *State) Result {
-		result := Result{Child: make([]Result, len(parserfied))}
+	return NewParser("Seq()", func(ps *State, node *Result) {
+		node.Child = make([]Result, len(parserfied))
 		startpos := ps.Pos
 		for i, parser := range parserfied {
-			result.Child[i] = parser(ps)
+			parser(ps, &node.Child[i])
 			if ps.Errored() {
 				ps.Pos = startpos
-				return result
+				return
 			}
 		}
-		return result
 	})
 }
 // NoAutoWS disables automatically ignoring whitespace between tokens for all parsers underneath
 func NoAutoWS(parser Parserish) Parser {
 	parserfied := Parsify(parser)
-	return func(ps *State) Result {
+	return func(ps *State, node *Result) {
 		ps.NoAutoWS = true
-		ret := parserfied(ps)
+		parserfied(ps, node)
 		ps.NoAutoWS = false
-		return ret
 	}
 }
 // Any matches the first successful parser and returns its result
 func Any(parsers ...Parserish) Parser {
 	parserfied := ParsifyAll(parsers...)
-	// For
+	// Records which parser was successful for each byte, and will use it first next time.
 	predictor := [255]int{}
-	return NewParser("Any()", func(ps *State) Result {
+	return NewParser("Any()", func(ps *State, node *Result) {
 		if ps.Pos >= len(ps.Input) {
 			ps.ErrorHere("!EOF")
-			return Result{}
+			return
 		}
 		longestError := Error{}
 		startpos := ps.Pos
 		predictorChar := ps.Input[startpos]
 		predicted := predictor[predictorChar]
-		node := parserfied[predicted](ps)
+		parserfied[predicted](ps, node)
 		if !ps.Errored() {
-			return node
+			return
 		}
 		if ps.Error.pos >= longestError.pos {
@@ -62,14 +58,14 @@ func Any(parsers ...Parserish) Parser {
 		if ps.Cut <= startpos {
 			ps.Recover()
 		} else {
-			return node
+			return
 		}
 		for i, parser := range parserfied {
 			if i == predicted {
 				continue
 			}
-			node := parser(ps)
+			parser(ps, node)
 			if ps.Errored() {
 				if ps.Error.pos >= longestError.pos {
 					longestError = ps.Error
@@ -81,12 +77,11 @@ func Any(parsers ...Parserish) Parser {
 				continue
 			}
 			predictor[predictorChar] = i
-			return node
+			return
 		}
 		ps.Error = longestError
 		ps.Pos = startpos
-		return Result{}
 	})
 }
@@ -111,26 +106,26 @@ func manyImpl(min int, op Parserish, sep ...Parserish) Parser {
 		sepParser = Parsify(sep[0])
 	}
-	return func(ps *State) Result {
+	return func(ps *State, node *Result) {
 		var result Result
 		startpos := ps.Pos
 		for {
-			node := opParser(ps)
+			opParser(ps, &result)
 			if ps.Errored() {
-				if len(result.Child) < min || ps.Cut > ps.Pos {
+				if len(node.Child) < min || ps.Cut > ps.Pos {
 					ps.Pos = startpos
-					return result
+					return
 				}
 				ps.Recover()
-				return result
+				return
 			}
-			result.Child = append(result.Child, node)
+			node.Child = append(node.Child, result)
 			if sepParser != nil {
-				sepParser(ps)
+				sepParser(ps, TrashResult)
 				if ps.Errored() {
 					ps.Recover()
-					return result
+					return
 				}
 			}
 		}
@@ -141,14 +136,12 @@ func manyImpl(min int, op Parserish, sep ...Parserish) Parser {
 func Maybe(parser Parserish) Parser {
 	parserfied := Parsify(parser)
-	return NewParser("Maybe()", func(ps *State) Result {
+	return NewParser("Maybe()", func(ps *State, node *Result) {
 		startpos := ps.Pos
-		node := parserfied(ps)
+		parserfied(ps, node)
 		if ps.Errored() && ps.Cut <= startpos {
 			ps.Recover()
 		}
-		return node
 	})
 }
@@ -158,49 +151,42 @@ func Maybe(parser Parserish) Parser {
 func Bind(parser Parserish, val interface{}) Parser {
 	p := Parsify(parser)
-	return func(ps *State) Result {
-		node := p(ps)
+	return func(ps *State, node *Result) {
+		p(ps, node)
 		if ps.Errored() {
-			return node
+			return
 		}
 		node.Result = val
-		return node
+		return
 	}
 }
 // Map applies the callback if the parser matches. This is used to set the Result
 // based on the matched result.
-func Map(parser Parserish, f func(n Result) Result) Parser {
+func Map(parser Parserish, f func(n *Result)) Parser {
 	p := Parsify(parser)
-	return func(ps *State) Result {
-		node := p(ps)
+	return func(ps *State, node *Result) {
+		p(ps, node)
 		if ps.Errored() {
-			return node
+			return
 		}
-		return f(node)
+		f(node)
 	}
 }
-func flatten(n Result) string {
-	if n.Token != "" {
-		return n.Token
-	}
+func flatten(n *Result) {
 	if len(n.Child) > 0 {
 		sbuf := &bytes.Buffer{}
-		for _, node := range n.Child {
-			sbuf.WriteString(flatten(node))
+		for _, child := range n.Child {
+			flatten(&child)
+			sbuf.WriteString(child.Token)
 		}
-		return sbuf.String()
+		n.Token = sbuf.String()
 	}
-	return ""
 }
 // Merge all child Tokens together recursively
 func Merge(parser Parserish) Parser {
-	return Map(parser, func(n Result) Result {
-		return Result{Token: flatten(n)}
-	})
+	return Map(parser, flatten)
 }

View File

@@ -165,8 +165,8 @@ type htmlTag struct {
 }
 func TestMap(t *testing.T) {
-	parser := Map(Seq("<", Chars("a-zA-Z0-9"), ">"), func(n Result) Result {
-		return Result{Result: htmlTag{n.Child[1].Token}}
+	parser := Seq("<", Chars("a-zA-Z0-9"), ">").Map(func(n *Result) {
+		n.Result = htmlTag{n.Child[1].Token}
 	})
 	t.Run("success", func(t *testing.T) {
@@ -235,8 +235,8 @@ func TestMerge(t *testing.T) {
 }
 func TestMapShorthand(t *testing.T) {
-	Chars("a-z").Map(func(n Result) Result {
-		return Result{Result: n.Token}
+	Chars("a-z").Map(func(n *Result) {
+		n.Result = n.Token
 	})
 }

View File

@@ -53,7 +53,7 @@ func (dp *debugParser) logf(ps *State, result *Result, format string, args ...in
 	buf.WriteString(fmt.Sprintf("%-10s | ", output))
 	buf.WriteString(strings.Repeat(" ", len(activeParsers)-1))
 	buf.WriteString(fmt.Sprintf(format, args...))
-	buf.WriteString(fmt.Sprintf(" > %#v", result))
 	buf.WriteRune('\n')
 	return buf.String()
 }
@@ -77,14 +77,14 @@ func (dp *debugParser) logEnd(ps *State, result *Result) {
 	}
 }
-func (dp *debugParser) Parse(ps *State) Result {
+func (dp *debugParser) Parse(ps *State, node *Result) {
 	activeParsers = append(activeParsers, dp)
 	start := time.Now()
 	dp.SelfStart = start
 	dp.logStart(ps)
-	ret := dp.Next(ps)
-	dp.logEnd(ps, &ret)
+	dp.Next(ps, node)
+	dp.logEnd(ps, node)
 	dp.Cumulative += time.Since(start)
 	dp.Self += time.Since(dp.SelfStart)
@@ -94,7 +94,6 @@ func (dp *debugParser) Parse(ps *State) Result {
 	}
 	activeParsers = activeParsers[0 : len(activeParsers)-1]
-	return ret
 }
 // NewParser should be called around the creation of every Parser.
@@ -109,13 +108,12 @@ func NewParser(name string, p Parser) Parser {
 		Location: location,
 	}
-	dp.Next = func(ps *State) Result {
+	dp.Next = func(ps *State, ret *Result) {
 		dp.Self += time.Since(dp.SelfStart)
-		ret := p(ps)
+		p(ps, ret)
 		dp.SelfStart = time.Now()
-		return ret
 	}
 	if len(dp.Location) > longestLocation {

View File

@@ -18,28 +18,26 @@ var (
 	tag Parser
 	identifier = Regex("[a-zA-Z][a-zA-Z0-9]*")
-	text = NotChars("<>").Map(func(n Result) Result {
-		return Result{Result: n.Token}
-	})
+	text = NotChars("<>").Map(func(n *Result) { n.Result = n.Token })
 	element = Any(text, &tag)
-	elements = Some(element).Map(func(n Result) Result {
+	elements = Some(element).Map(func(n *Result) {
 		ret := []interface{}{}
 		for _, child := range n.Child {
 			ret = append(ret, child.Result)
 		}
-		return Result{Result: ret}
+		n.Result = ret
 	})
 	attr = Seq(identifier, "=", StringLit(`"'`))
-	attrs = Some(attr).Map(func(node Result) Result {
+	attrs = Some(attr).Map(func(node *Result) {
 		attr := map[string]string{}
 		for _, attrNode := range node.Child {
 			attr[attrNode.Child[0].Token] = attrNode.Child[2].Result.(string)
 		}
-		return Result{Result: attr}
+		node.Result = attr
 	})
 	tstart = Seq("<", identifier, Cut(), attrs, ">")
@@ -47,13 +45,12 @@ var (
 )
 func init() {
-	tag = Seq(tstart, Cut(), elements, tend).Map(func(node Result) Result {
+	tag = Seq(tstart, Cut(), elements, tend).Map(func(node *Result) {
 		openTag := node.Child[0]
-		return Result{Result: htmlTag{
+		node.Result = htmlTag{
 			Name: openTag.Child[1].Token,
 			Attributes: openTag.Child[3].Result.(map[string]string),
 			Body: node.Child[2].Result.([]interface{}),
-		}}
+		}
 	})
 }

View File

@@ -1,12 +1,15 @@
 package html
 import (
+	"os"
 	"testing"
 	"github.com/stretchr/testify/require"
+	"github.com/vektah/goparsify"
 )
 func TestParse(t *testing.T) {
+	goparsify.EnableLogging(os.Stdout)
 	result, err := parse(`<body>hello <p color="blue">world</p></body>`)
 	require.NoError(t, err)
 	require.Equal(t, htmlTag{Name: "body", Attributes: map[string]string{}, Body: []interface{}{

View File

@@ -13,22 +13,22 @@ var (
 	_number = NumberLit()
 	_properties = Some(Seq(StringLit(`"`), ":", &_value), ",")
-	_array = Seq("[", Cut(), Some(&_value, ","), "]").Map(func(n Result) Result {
+	_array = Seq("[", Cut(), Some(&_value, ","), "]").Map(func(n *Result) {
 		ret := []interface{}{}
 		for _, child := range n.Child[2].Child {
 			ret = append(ret, child.Result)
 		}
-		return Result{Result: ret}
+		n.Result = ret
 	})
-	_object = Seq("{", Cut(), _properties, "}").Map(func(n Result) Result {
+	_object = Seq("{", Cut(), _properties, "}").Map(func(n *Result) {
 		ret := map[string]interface{}{}
 		for _, prop := range n.Child[2].Child {
 			ret[prop.Child[0].Result.(string)] = prop.Child[2].Result
 		}
-		return Result{Result: ret}
+		n.Result = ret
 	})
 )

View File

@@ -11,12 +11,12 @@ import (
 // - escaped characters, eg \" or \n
 // - unicode sequences, eg \uBEEF
 func StringLit(allowedQuotes string) Parser {
-	return NewParser("string literal", func(ps *State) Result {
+	return NewParser("string literal", func(ps *State, node *Result) {
 		ps.AutoWS()
 		if !stringContainsByte(allowedQuotes, ps.Input[ps.Pos]) {
 			ps.ErrorHere(allowedQuotes)
-			return Result{}
+			return
 		}
 		quote := ps.Input[ps.Pos]
@@ -30,7 +30,7 @@ func StringLit(allowedQuotes string) Parser {
 			case '\\':
 				if end+1 >= inputLen {
 					ps.ErrorHere(string(quote))
-					return Result{}
+					return
 				}
 				if buf == nil {
@@ -42,14 +42,14 @@ func StringLit(allowedQuotes string) Parser {
 					if end+6 >= inputLen {
 						ps.Error.expected = "[a-f0-9]{4}"
 						ps.Error.pos = end + 2
-						return Result{}
+						return
 					}
 					r, ok := unhex(ps.Input[end+2 : end+6])
 					if !ok {
 						ps.Error.expected = "[a-f0-9]"
 						ps.Error.pos = end + 2
-						return Result{}
+						return
 					}
 					buf.WriteRune(r)
 					end += 6
@@ -59,12 +59,13 @@ func StringLit(allowedQuotes string) Parser {
 				}
 			case quote:
 				if buf == nil {
-					result := ps.Input[ps.Pos+1 : end]
+					node.Result = ps.Input[ps.Pos+1 : end]
 					ps.Pos = end + 1
-					return Result{Result: result}
+					return
 				}
 				ps.Pos = end + 1
-				return Result{Result: buf.String()}
+				node.Result = buf.String()
+				return
 			default:
 				if buf == nil {
 					if ps.Input[end] < 127 {
@@ -82,13 +83,12 @@ func StringLit(allowedQuotes string) Parser {
 		}
 		ps.ErrorHere(string(quote))
-		return Result{}
 	})
 }
 // NumberLit matches a floating point or integer number and returns it as an int64 or float64 in .Result
 func NumberLit() Parser {
-	return NewParser("number literal", func(ps *State) Result {
+	return NewParser("number literal", func(ps *State, node *Result) {
 		ps.AutoWS()
 		end := ps.Pos
 		float := false
@@ -126,22 +126,20 @@ func NumberLit() Parser {
 		if end == ps.Pos {
 			ps.ErrorHere("number")
-			return Result{}
+			return
 		}
-		var result interface{}
 		var err error
 		if float {
-			result, err = strconv.ParseFloat(ps.Input[ps.Pos:end], 10)
+			node.Result, err = strconv.ParseFloat(ps.Input[ps.Pos:end], 10)
 		} else {
-			result, err = strconv.ParseInt(ps.Input[ps.Pos:end], 10, 64)
+			node.Result, err = strconv.ParseInt(ps.Input[ps.Pos:end], 10, 64)
 		}
 		if err != nil {
 			ps.ErrorHere("number")
-			return Result{}
+			return
 		}
 		ps.Pos = end
-		return Result{Result: result}
 	})
 }

View File

@@ -7,6 +7,8 @@ import (
 	"unicode/utf8"
 )
+var TrashResult = &Result{}
 // Result is the output of a parser. Usually only one of its fields will be set and should be thought of
 // more as a union type. Having it avoids interface{} littered all through the parsing code and makes
 // it easy to do the two most common operations, getting a token and finding a child.
@@ -22,10 +24,10 @@ type Result struct {
 // - A parser that errors must set state.Error
 // - A parser that errors must not change state.Pos
 // - A parser that consumed some input should advance state.Pos
-type Parser func(*State) Result
+type Parser func(*State, *Result)
 // Map shorthand for Map(p, func())
-func (p Parser) Map(f func(n Result) Result) Parser {
+func (p Parser) Map(f func(n *Result)) Parser {
 	return Map(p, f)
 }
@@ -51,14 +53,14 @@ type Parserish interface{}
 // See Parserish for details.
 func Parsify(p Parserish) Parser {
 	switch p := p.(type) {
-	case func(*State) Result:
+	case func(*State, *Result):
 		return p
 	case Parser:
 		return p
 	case *Parser:
 		// Todo: Maybe capture this stack and on nil show it? Is there a good error library to do this?
-		return func(ptr *State) Result {
-			return (*p)(ptr)
+		return func(ptr *State, node *Result) {
+			(*p)(ptr, node)
 		}
 	case string:
 		return Exact(p)
@@ -85,7 +87,8 @@ func Run(parser Parserish, input string, ws ...VoidParser) (result interface{},
 		ps.WS = ws[0]
 	}
-	ret := p(ps)
+	ret := Result{}
+	p(ps, &ret)
 	ps.AutoWS()
 	if ps.Error.expected != "" {
@@ -101,32 +104,30 @@
 // WS will consume whitespace, it should only be needed when AutoWS is turned off
 func WS() Parser {
-	return NewParser("AutoWS", func(ps *State) Result {
+	return NewParser("AutoWS", func(ps *State, ret *Result) {
 		ps.WS(ps)
-		return Result{}
 	})
 }
 // Cut prevents backtracking beyond this point. Usually used after keywords when you
 // are sure this is the correct path. Improves performance and error reporting.
 func Cut() Parser {
-	return func(ps *State) Result {
+	return func(ps *State, node *Result) {
 		ps.Cut = ps.Pos
-		return Result{}
 	}
 }
 // Regex returns a match if the regex successfully matches
 func Regex(pattern string) Parser {
 	re := regexp.MustCompile("^" + pattern)
-	return NewParser(pattern, func(ps *State) Result {
+	return NewParser(pattern, func(ps *State, node *Result) {
 		ps.AutoWS()
 		if match := re.FindString(ps.Get()); match != "" {
 			ps.Advance(len(match))
-			return Result{Token: match}
+			node.Token = match
+			return
 		}
 		ps.ErrorHere(pattern)
-		return Result{}
 	})
 }
@@ -134,29 +135,29 @@ func Regex(pattern string) Parser {
 func Exact(match string) Parser {
 	if len(match) == 1 {
 		matchByte := match[0]
-		return NewParser(match, func(ps *State) Result {
+		return NewParser(match, func(ps *State, node *Result) {
 			ps.AutoWS()
 			if ps.Pos >= len(ps.Input) || ps.Input[ps.Pos] != matchByte {
 				ps.ErrorHere(match)
-				return Result{}
+				return
 			}
 			ps.Advance(1)
-			return Result{Token: match}
+			node.Token = match
 		})
 	}
-	return NewParser(match, func(ps *State) Result {
+	return NewParser(match, func(ps *State, node *Result) {
 		ps.AutoWS()
 		if !strings.HasPrefix(ps.Get(), match) {
 			ps.ErrorHere(match)
-			return Result{}
+			return
 		}
 		ps.Advance(len(match))
-		return Result{Token: match}
+		node.Token = match
 	})
 }
@@ -222,7 +223,7 @@ func charsImpl(matcher string, stopOn bool, repetition ...int) Parser {
 	min, max := parseRepetition(1, -1, repetition...)
 	alphabet, ranges := parseMatcher(matcher)
-	return func(ps *State) Result {
+	return func(ps *State, node *Result) {
 		ps.AutoWS()
 		matched := 0
 		for ps.Pos+matched < len(ps.Input) {
@@ -250,11 +251,10 @@ func charsImpl(matcher string, stopOn bool, repetition ...int) Parser {
 		if matched < min {
 			ps.ErrorHere(matcher)
-			return Result{}
+			return
 		}
-		result := ps.Input[ps.Pos : ps.Pos+matched]
+		node.Token = ps.Input[ps.Pos : ps.Pos+matched]
 		ps.Advance(matched)
-		return Result{Token: result}
 	}
 }

View File

@@ -7,21 +7,21 @@ import (
 )
 func TestParsify(t *testing.T) {
+	result := Result{}
 	t.Run("strings", func(t *testing.T) {
-		require.Equal(t, "ff", Parsify("ff")(NewState("ffooo")).Token)
+		Parsify("ff")(NewState("ffooo"), &result)
+		require.Equal(t, "ff", result.Token)
 	})
 	t.Run("parsers", func(t *testing.T) {
-		require.Equal(t, "ff", Parsify(Chars("f"))(NewState("ffooo")).Token)
+		Parsify(Chars("f"))(NewState("ffooo"), &result)
+		require.Equal(t, "ff", result.Token)
 	})
 	t.Run("parser funcs", func(t *testing.T) {
-		node := Parsify(func(p *State) Result {
-			return Result{Token: "hello"}
-		})(NewState("ffooo"))
-		require.Equal(t, "hello", node.Token)
+		Parsify(func(p *State, node *Result) { node.Token = "hello" })(NewState("ffooo"), &result)
+		require.Equal(t, "hello", result.Token)
 	})
 	t.Run("*parsers", func(t *testing.T) {
@@ -29,8 +29,8 @@ func TestParsify(t *testing.T) {
 		parserfied := Parsify(&parser)
 		parser = Chars("f")
-		node := parserfied(NewState("ffooo"))
-		require.Equal(t, "ff", node.Token)
+		parserfied(NewState("ffooo"), &result)
+		require.Equal(t, "ff", result.Token)
 	})
 	require.Panics(t, func() {
@@ -41,10 +41,12 @@ func TestParsify(t *testing.T) {
 func TestParsifyAll(t *testing.T) {
 	parsers := ParsifyAll("ff", "gg")
-	result := parsers[0](NewState("ffooo"))
+	result := Result{}
+	parsers[0](NewState("ffooo"), &result)
 	require.Equal(t, "ff", result.Token)
-	result = parsers[1](NewState("ffooo"))
+	result = Result{}
+	parsers[1](NewState("ffooo"), &result)
 	require.Equal(t, "", result.Token)
 }
@@ -169,7 +171,7 @@ func TestRegex(t *testing.T) {
 }
 func TestParseString(t *testing.T) {
-	Y := Map("hello", func(n Result) Result { return Result{Result: n.Token} })
+	Y := Map("hello", func(n *Result) { n.Result = n.Token })
 	t.Run("full match", func(t *testing.T) {
 		result, err := Run(Y, "hello")
@@ -205,17 +207,16 @@ func TestAutoWS(t *testing.T) {
 	})
 	t.Run("unicode whitespace", func(t *testing.T) {
-		ps := NewState(" \u202f hello")
-		ps.WS = UnicodeWhitespace
-		require.Equal(t, "", ps.Get())
-		result := Exact("hello")(ps)
-		require.Equal(t, "hello", result.Token)
+		result, ps := runParser(" \u202f hello", NoAutoWS(Seq(WS(), "hello")))
+		require.Equal(t, "hello", result.Child[1].Token)
 		require.False(t, ps.Errored())
 	})
 }
 func runParser(input string, parser Parser) (Result, *State) {
 	ps := NewState(input)
-	result := parser(ps)
+	result := Result{}
+	parser(ps, &result)
 	return result, ps
 }