Rename some things

Adam Scarr 2017-08-09 19:35:15 +10:00
parent b62ae2f567
commit 8b2f10f238
7 changed files with 39 additions and 73 deletions
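The one-line title is terse, so for orientation: this diff renames the sequencing combinator And to Seq and the zero-or-more combinator Kleene to Some, and removes the zero-argument Nil parser along with the tests and the assertNilParser helper that depended on it. In the hunks below, a leading - marks the old line and a leading + marks its replacement. A before/after sketch of a typical call site, adapted from the calc example in this diff:

```go
// Before this commit
group := And("(", sum, ")")        // match "(", then sum, then ")"
items := Kleene(Chars("a-g"), ",") // zero or more runs of a-g, separated by commas

// After this commit
group := Seq("(", sum, ")")
items := Some(Chars("a-g"), ",")
```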


@ -13,7 +13,7 @@ var (
sumOp = Chars("+-", 1, 1)
prodOp = Chars("/*", 1, 1)
- groupExpr = Map(And("(", sum, ")"), func(n Node) Node {
+ groupExpr = Map(Seq("(", sum, ")"), func(n Node) Node {
return Node{Result: n.Child[1].Result}
})
@ -28,7 +28,7 @@ var (
}
})
- sum = Map(And(prod, Kleene(And(sumOp, prod))), func(n Node) Node {
+ sum = Map(Seq(prod, Some(Seq(sumOp, prod))), func(n Node) Node {
i := n.Child[0].Result.(float64)
for _, op := range n.Child[1].Child {
@ -43,7 +43,7 @@ var (
return Node{Result: i}
})
- prod = Map(And(&value, Kleene(And(prodOp, &value))), func(n Node) Node {
+ prod = Map(Seq(&value, Some(Seq(prodOp, &value))), func(n Node) Node {
i := n.Child[0].Result.(float64)
for _, op := range n.Child[1].Child {


@ -4,18 +4,10 @@ import (
"bytes"
)
- var Nil = NewParser("Nil", func(ps *State) Node {
- return Node{}
- })
- func And(parsers ...Parserish) Parser {
- if len(parsers) == 0 {
- return Nil
- }
+ func Seq(parsers ...Parserish) Parser {
parserfied := ParsifyAll(parsers...)
return NewParser("And()", func(ps *State) Node {
return NewParser("Seq()", func(ps *State) Node {
result := Node{Child: make([]Node, len(parserfied))}
startpos := ps.Pos
for i, parser := range parserfied {
@ -42,10 +34,6 @@ func NoAutoWS(parser Parserish) Parser {
}
func Any(parsers ...Parserish) Parser {
- if len(parsers) == 0 {
- return Nil
- }
parserfied := ParsifyAll(parsers...)
return NewParser("Any()", func(ps *State) Node {
@ -69,8 +57,8 @@ func Any(parsers ...Parserish) Parser {
})
}
- func Kleene(opScan Parserish, sepScan ...Parserish) Parser {
- return NewParser("Kleene()", manyImpl(0, opScan, sepScan...))
+ func Some(opScan Parserish, sepScan ...Parserish) Parser {
+ return NewParser("Some()", manyImpl(0, opScan, sepScan...))
}
func Many(opScan Parserish, sepScan ...Parserish) Parser {
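For readers skimming the renamed combinators, a small usage sketch (not part of this diff; the inputs are illustrative and, as the tests below show, whitespace between tokens is skipped automatically):

```go
// Seq matches its parsers in order and returns one child node per parser.
greeting := Seq("hello", "world")  // matches "hello world"

// Some matches zero or more occurrences of its first parser, with an optional
// separator parser, returning one child node per occurrence.
letters := Some(Chars("a-g"), ",") // matches "a,b,c,d,e"
```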


@ -6,16 +6,8 @@ import (
"github.com/stretchr/testify/require"
)
- func TestNil(t *testing.T) {
- node, p2 := runParser("hello world", Nil)
- require.Equal(t, Node{}, node)
- require.Equal(t, 0, p2.Pos)
- require.False(t, p2.Errored())
- }
- func TestAnd(t *testing.T) {
- parser := And("hello", "world")
+ func TestSeq(t *testing.T) {
+ parser := Seq("hello", "world")
t.Run("matches sequence", func(t *testing.T) {
node, p2 := runParser("hello world", parser)
@ -29,10 +21,6 @@ func TestAnd(t *testing.T) {
require.Equal(t, 6, p2.Error.pos)
require.Equal(t, 0, p2.Pos)
})
t.Run("No parsers", func(t *testing.T) {
assertNilParser(t, And())
})
}
func TestMaybe(t *testing.T) {
@ -60,8 +48,8 @@ func TestAny(t *testing.T) {
t.Run("Returns longest error", func(t *testing.T) {
_, p2 := runParser("hello world!", Any(
"nope",
And("hello", "world", "."),
And("hello", "brother"),
Seq("hello", "world", "."),
Seq("hello", "brother"),
))
require.Equal(t, "offset 11: Expected .", p2.Error.Error())
require.Equal(t, 11, p2.Error.Pos())
@ -73,34 +61,30 @@ func TestAny(t *testing.T) {
require.Equal(t, Node{}, node)
require.Equal(t, 0, p2.Pos)
})
t.Run("No parsers", func(t *testing.T) {
assertNilParser(t, Any())
})
}
- func TestKleene(t *testing.T) {
+ func TestSome(t *testing.T) {
t.Run("Matches sequence with sep", func(t *testing.T) {
node, p2 := runParser("a,b,c,d,e,", Kleene(Chars("a-g"), ","))
node, p2 := runParser("a,b,c,d,e,", Some(Chars("a-g"), ","))
require.False(t, p2.Errored())
assertSequence(t, node, "a", "b", "c", "d", "e")
require.Equal(t, 10, p2.Pos)
})
t.Run("Matches sequence without sep", func(t *testing.T) {
node, p2 := runParser("a,b,c,d,e,", Kleene(Any(Chars("a-g"), ",")))
node, p2 := runParser("a,b,c,d,e,", Some(Any(Chars("a-g"), ",")))
assertSequence(t, node, "a", ",", "b", ",", "c", ",", "d", ",", "e", ",")
require.Equal(t, 10, p2.Pos)
})
t.Run("splits words automatically on space", func(t *testing.T) {
node, p2 := runParser("hello world", Kleene(Chars("a-z")))
node, p2 := runParser("hello world", Some(Chars("a-z")))
assertSequence(t, node, "hello", "world")
require.Equal(t, "", p2.Get())
})
t.Run("Stops on error", func(t *testing.T) {
node, p2 := runParser("a,b,c,d,e,", Kleene(Chars("a-c"), ","))
node, p2 := runParser("a,b,c,d,e,", Some(Chars("a-c"), ","))
assertSequence(t, node, "a", "b", "c")
require.Equal(t, 6, p2.Pos)
require.Equal(t, "d,e,", p2.Get())
@ -139,7 +123,7 @@ type htmlTag struct {
}
func TestMap(t *testing.T) {
parser := Map(And("<", Chars("a-zA-Z0-9"), ">"), func(n Node) Node {
parser := Map(Seq("<", Chars("a-zA-Z0-9"), ">"), func(n Node) Node {
return Node{Result: htmlTag{n.Child[1].Token}}
})
@ -157,7 +141,7 @@ func TestMap(t *testing.T) {
func TestMerge(t *testing.T) {
var bracer Parser
bracer = And("(", Maybe(&bracer), ")")
bracer = Seq("(", Maybe(&bracer), ")")
parser := Merge(bracer)
t.Run("sucess", func(t *testing.T) {
@ -172,12 +156,6 @@ func TestMerge(t *testing.T) {
})
}
- func assertNilParser(t *testing.T, parser Parser) {
- node, p2 := runParser("fff", parser)
- require.Equal(t, Node{}, node)
- require.Equal(t, 0, p2.Pos)
- }
func assertSequence(t *testing.T, node Node, expected ...string) {
require.NotNil(t, node)
actual := []string{}


@ -17,13 +17,13 @@ type Tag struct {
var (
tag Parser
- identifier = NoAutoWS(Merge(And(WS(), Chars("a-zA-Z", 1), Chars("a-zA-Z0-9", 0))))
+ identifier = NoAutoWS(Merge(Seq(WS(), Chars("a-zA-Z", 1), Chars("a-zA-Z0-9", 0))))
text = Map(NotChars("<>"), func(n Node) Node {
return Node{Result: n.Token}
})
element = Any(text, &tag)
- elements = Map(Kleene(element), func(n Node) Node {
+ elements = Map(Some(element), func(n Node) Node {
ret := []interface{}{}
for _, child := range n.Child {
ret = append(ret, child.Result)
@ -31,8 +31,8 @@ var (
return Node{Result: ret}
})
- attr = And(identifier, "=", StringLit(`"'`))
- attrs = Map(Kleene(attr), func(node Node) Node {
+ attr = Seq(identifier, "=", StringLit(`"'`))
+ attrs = Map(Some(attr), func(node Node) Node {
attr := map[string]string{}
for _, attrNode := range node.Child {
@ -42,12 +42,12 @@ var (
return Node{Result: attr}
})
tstart = And("<", identifier, attrs, ">")
tend = And("</", identifier, ">")
tstart = Seq("<", identifier, attrs, ">")
tend = Seq("</", identifier, ">")
)
func init() {
- tag = Map(And(tstart, elements, tend), func(node Node) Node {
+ tag = Map(Seq(tstart, elements, tend), func(node Node) Node {
openTag := node.Child[0]
return Node{Result: Tag{
Name: openTag.Child[1].Token,


@ -10,9 +10,9 @@ var (
_false = Bind("false", false)
_string = StringLit(`"`)
_number = NumberLit()
- _properties = Kleene(And(StringLit(`"`), ":", &_value), ",")
+ _properties = Some(Seq(StringLit(`"`), ":", &_value), ",")
- _array = Map(And("[", Kleene(&_value, ","), "]"), func(n Node) Node {
+ _array = Map(Seq("[", Some(&_value, ","), "]"), func(n Node) Node {
ret := []interface{}{}
for _, child := range n.Child[1].Child {
ret = append(ret, child.Result)
@ -20,7 +20,7 @@ var (
return Node{Result: ret}
})
- _object = Map(And("{", _properties, "}"), func(n Node) Node {
+ _object = Map(Seq("{", _properties, "}"), func(n Node) Node {
ret := map[string]interface{}{}
for _, prop := range n.Child[1].Child {


@ -21,12 +21,12 @@ type Parser func(*State) Node
// eg, matching balanced paren:
// ```go
// var group Parser
- // group = And("(", Maybe(&group), ")")
+ // group = Seq("(", Maybe(&group), ")")
// ```
// vs
// ```go
// var group ParserPtr{}
- // group.P = And(Exact("("), Maybe(group.Parse), Exact(")"))
+ // group.P = Seq(Exact("("), Maybe(group.Parse), Exact(")"))
// ```
type Parserish interface{}


@ -25,12 +25,12 @@ If you build the parser with -tags debug it will instrument each parser and a ca
```
Any() 415.7136ms 87000 calls json.go:35
Map() 309.6569ms 12000 calls json.go:31
- And() 298.6519ms 12000 calls json.go:23
- Kleene() 290.6462ms 12000 calls json.go:13
- And() 272.6392ms 81000 calls json.go:13
- And() 78.0404ms 13000 calls json.go:15
+ Seq() 298.6519ms 12000 calls json.go:23
+ Some() 290.6462ms 12000 calls json.go:13
+ Seq() 272.6392ms 81000 calls json.go:13
+ Seq() 78.0404ms 13000 calls json.go:15
Map() 78.0404ms 13000 calls json.go:21
- Kleene() 77.0401ms 1000 calls json.go:15
+ Some() 77.0401ms 1000 calls json.go:15
string literal 7.5053ms 81000 calls json.go:13
string literal 4.5031ms 84000 calls json.go:11
, 4.0008ms 81000 calls json.go:13
@ -106,7 +106,7 @@ func TestAddition(t *testing.T) {
var sumOp = Chars("+-", 1, 1)
- sum = Map(And(number, Kleene(And(sumOp, number))), func(n Node) Node {
+ sum = Map(Seq(number, Some(And(sumOp, number))), func(n Node) Node {
i := n.Child[0].Result.(float64)
for _, op := range n.Child[1].Child {
@ -124,7 +124,7 @@ sum = Map(And(number, Kleene(And(sumOp, number))), func(n Node) Node {
// and update Calc to point to the new root parser -> `result, remaining, err := ParseString(sum, input)`
```
- This parser will match number ([+-] number)+, then map its to be the sum. See how the Child map directly to the positions in the parsers? n is the result of the and, n.Child[0] is its first argument, n.Child[1] is the result of the Kleene parser, n.Child[1].Child[0] is the result of the first And and so fourth. Given how closely tied the parser and the Map are it is good to keep the two together.
+ This parser will match number ([+-] number)+, then map it to the sum. See how the Child nodes map directly to the positions in the parsers? n is the result of the Seq, `n.Child[0]` is its first argument, `n.Child[1]` is the result of the Some parser, `n.Child[1].Child[0]` is the result of the first And, and so forth. Given how closely tied the parser and the Map are, it is good to keep the two together.
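To make that mapping concrete, here is a minimal walkthrough (a sketch reusing the sum parser above and the ParseString call mentioned earlier; the input string is only illustrative):

```go
// For the input "1+2+3", inside sum's Map callback:
//   n.Child[0]          -> the first number, 1
//   n.Child[1]          -> the Some(...) node, with one child per (sumOp, number) pair
//   n.Child[1].Child[0] -> the first pair: "+" and 2
//   n.Child[1].Child[1] -> the second pair: "+" and 3
result, remaining, err := ParseString(sum, "1+2+3")
// result is float64(6), remaining is "", and err is nil when the whole input matches.
```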
You can continue like this and add multiplication and parentheses fairly easily. Eventually, if you keep adding parsers, you will end up with a loop, and Go will give you a handy error message like:
```
@ -132,10 +132,10 @@ typechecking loop involving value = goparsify.Any(number, groupExpr)
```
we need to break the loop using a pointer, then set its value in init
```
```go
var (
value Parser
- prod = And(&value, Kleene(And(prodOp, &value)))
+ prod = Seq(&value, Some(And(prodOp, &value)))
)
func init() {
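// Sketch of the step this excerpt cuts off (inferred from the typechecking
// error above rather than quoted from the README): init assigns the
// pointered parser once the parsers it depends on exist, e.g.
//     value = Any(number, groupExpr)
// which breaks the package-level initialization cycle.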