From 1a78e6caf0fc641ee0f0d0de64cc704c6ea7f5c2 Mon Sep 17 00:00:00 2001
From: Matthew Hall
Date: Wed, 8 Mar 2023 19:20:19 +1300
Subject: i just love refactoring things, i guess

---
 spec/parcom_spec.cr | 179 ++++++++++++++++++++++++++++++++++------------------
 1 file changed, 116 insertions(+), 63 deletions(-)

(limited to 'spec/parcom_spec.cr')

diff --git a/spec/parcom_spec.cr b/spec/parcom_spec.cr
index ade9aff..2fff7fc 100644
--- a/spec/parcom_spec.cr
+++ b/spec/parcom_spec.cr
@@ -4,10 +4,69 @@ require "../src/parcom.cr"
 
 include Parcom
 
+describe TokenStream do
+  describe ".from_string" do
+    it "constructs a TokenStream(Char) from a String" do
+      tokens = TokenStream.from_string("abcd")
+      tokens.tokens.should eq("abcd".chars)
+    end
+  end
+
+  describe "#initialize" do
+    it "wraps an array with the contents of the given iterable" do
+      set = Set{'a', 'b', 'c', 'd'}
+      tokens = TokenStream.new(set)
+      tokens.tokens.should eq(set.to_a)
+
+      arr = "abcd".chars
+      tokens = TokenStream.new(arr)
+      tokens.tokens.should eq(arr)
+    end
+  end
+
+  context do
+    tokens_empty = TokenStream.new([] of Char)
+    tokens = TokenStream.from_string("abcd")
+
+    describe "#[]" do
+      it "returns the token at the given index" do
+        tokens[2].should eq('c')
+        expect_raises(IndexError) { tokens_empty[2] }
+      end
+
+      it "returns a new TokenStream similar to Array#[](Int, Int)" do
+        tokens[1, 5].should eq(TokenStream.new(['b', 'c', 'd']))
+        expect_raises(IndexError) { tokens_empty[1, 5] }
+      end
+
+      it "returns a new TokenStream similar to Array#[](Range)" do
+        tokens[1..3].should eq(TokenStream.new(['b', 'c', 'd']))
+        expect_raises(IndexError) { tokens_empty[1..3] }
+      end
+    end
+
+    describe "#[]?" do
+      it "analogous to `Array#[]?`" do
+        # we should only need to check the nil-returning cases
+        tokens_empty[2]?.should be_nil
+        tokens_empty[1, 5]?.should be_nil
+        tokens_empty[1..3]?.should be_nil
+      end
+    end
+
+    describe "#empty?" do
+      it "exposes the `#empty?` method of the wrapped array" do
+        tokens.empty?.should be_false
+        tokens_empty.empty?.should be_true
+      end
+    end
+  end
+end
+
 describe Result do
   describe "#initialize" do
     it "sets values for #tokens and #value" do
-      tokens = "esting".chars
+      tokens = TokenStream.from_string("esting")
       value = 't'
       result = Result(Char, Char).new(tokens, value)
 
@@ -18,16 +77,18 @@ describe Result do
 end
 
 describe Parser do
+  p = AnyToken(Char).new
+
   describe "#parse?" do
     it "returns `nil` if the parser fails" do
-      result = AnyToken(Char).new.parse?([] of Char)
+      result = p.parse?(TokenStream.new([] of Char))
 
       result.should be_nil
     end
 
     it "returns a `Result(T, V)` if the parser succeeds" do
-      tokens = "testing".chars
-      result = AnyToken(Char).new.parse(tokens)
+      tokens = TokenStream.from_string("testing")
+      result = p.parse(tokens)
 
       result.should be_a(Result(Char, Char))
     end
@@ -35,27 +96,27 @@ describe Parser do
 
   describe "#|" do
     it "creates an `Alt` instance from `self` and another `Parser(T, V)`" do
-      p = AnyToken(Char).new | AnyToken(Char).new
+      p_alt = p | p
 
-      p.should be_a(Alt(Char, Char))
+      p_alt.should be_a(Alt(Char, Char))
     end
   end
 
   describe "#assert" do
     it "creates an `Assert` instance from `self` and a `Proc(T, Bool)`" do
       f = ->(x : Char) { x == '#' }
-      p = AnyToken(Char).new.assert(f)
+      p_assert = p.assert(f)
 
-      p.should be_a(Assert(Char, Char))
+      p_assert.should be_a(Assert(Char, Char))
     end
   end
 
   describe "#map" do
     it "creates a `Map(T, V, U)` instance from `self` and a Proc(V, U)" do
       f = ->(x : Char) { x.letter? }
-      p = AnyToken(Char).new.map(f)
+      p_map = p.map(f)
 
-      p.should be_a(Map(Char, Char, Bool))
+      p_map.should be_a(Map(Char, Char, Bool))
     end
   end
 end
@@ -63,7 +124,7 @@ end
 describe Flunk do
   describe "#parse" do
     it "always fails" do
-      tokens = "testing".chars
+      tokens = TokenStream.from_string("testing")
 
       expect_raises(ParserException) { Flunk(Char, Char).new.parse(tokens) }
     end
@@ -71,138 +132,130 @@ describe Flunk do
 end
 
 describe AnyToken do
-  describe "#parse" do
-    it "succeeds when input is non-empty" do
-      tokens = "testing".chars
-      result = AnyToken(Char).new.parse(tokens)
+  context do
+    p = AnyToken(Char).new
 
-      result.tokens.should eq(tokens[1..])
-      result.value.should eq('t')
-    end
+    describe "#parse" do
+      it "succeeds when input is non-empty" do
+        tokens = TokenStream.from_string("testing")
+        result = p.parse(tokens)
 
-    it "fails when input is empty" do
-      p = AnyToken(Char).new
+        result.tokens.should eq(tokens[1..])
+        result.value.should eq('t')
+      end
 
-      expect_raises(ParserException) { p.parse([] of Char) }
+      it "fails when input is empty" do
+        expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
+      end
     end
   end
 end
 
 describe Eof do
+  p = Eof(Char).new
+
   describe "#parse" do
     it "succeeds when input is empty" do
-      result = Eof(Char).new.parse([] of Char)
+      result = p.parse(TokenStream.new([] of Char))
 
       result.tokens.empty?.should be_true
       result.value.should be_nil
     end
 
     it "fails when input is non-empty" do
-      tokens = "testing".chars
+      tokens = TokenStream.from_string("testing")
 
-      expect_raises(ParserException) { Eof(Char).new.parse(tokens) }
+      expect_raises(ParserException) { p.parse(tokens) }
     end
   end
 end
 
 describe Peek do
+  tokens = TokenStream.from_string("testing")
+  p = AnyToken(Char).new
+  result_normal = p.parse(tokens)
+  result_peek = Peek.new(p).parse(tokens)
+
   describe "#parse" do
     it "does not modify the result of the wrapped parser" do
-      tokens = "testing".chars
-      parser = AnyToken(Char).new
-      result_normal = parser.parse(tokens)
-      result_peek = Peek.new(parser).parse(tokens)
-
       result_peek.value.should eq(result_normal.value)
     end
 
     it "does not consume any input" do
-      tokens = "testing".chars
-      parser = AnyToken(Char).new
-      result = Peek.new(parser).parse(tokens)
-
-      result.tokens.should eq(tokens)
+      result_peek.tokens.should eq(tokens)
     end
   end
 end
 
 describe Assert do
+  test_f = ->(x : Char) { x == 't' }
+  p = Assert.new(AnyToken(Char).new, test_f)
+
   describe "#parse" do
     it "fails if the wrapped parser fails" do
-      test = ->(x : Char) { true } # doesn't matter for this test
-      p = Assert.new(AnyToken(Char).new, test)
-
-      expect_raises(ParserException) { p.parse([] of Char) }
+      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
    end
 
     it "fails if the result value fails the test" do
-      tokens = "testing".chars
-      test = ->(x : Char) { x == '$' }
-      p = Assert.new(AnyToken(Char).new, test)
+      tokens = TokenStream.from_string("_testing")
 
       expect_raises(ParserException) { p.parse(tokens) }
     end
 
     it "succeeds if the wrapped parser succeeds and the test passes" do
-      tokens = "testing".chars
+      tokens = TokenStream.from_string("testing")
       expected_char = tokens[0]
-      test = ->(x : Char) { x == expected_char }
-      p = Assert.new(AnyToken(Char).new, test)
       result = p.parse(tokens)
 
       result.value.should eq(expected_char)
-      test.call(expected_char).should be_true
+      test_f.call(expected_char).should be_true
     end
   end
 end
 
 describe Satisfy do
-  describe "#parse" do
-    it "fails if there are input is empty" do
-      test = ->(x : Char) { x == '#' } # doesn't matter for this case
-      p = Satisfy(Char).new(test)
+  test_f = ->(x : Char) { x == 't' }
+  p = Satisfy.new(test_f)
 
-      expect_raises(ParserException) { p.parse([] of Char) }
+  describe "#parse" do
+    it "fails if the input is empty" do
+      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
     end
 
     it "fails if the token fails the test" do
-      tokens = "testing".chars
-      test = ->(x : Char) { x == '#' }
-      p = Satisfy(Char).new(test)
+      tokens = TokenStream.from_string("_testing")
 
       expect_raises(ParserException) { p.parse(tokens) }
     end
 
     it "succeeds if the token passes the test" do
-      tokens = "testing".chars
+      tokens = TokenStream.from_string("testing")
       expected_char = tokens[0]
-      test = ->(x : Char) { x == expected_char }
-      p = Satisfy(Char).new(test)
      result = p.parse(tokens)
 
       result.value.should eq(expected_char)
-      test.call(result.value).should be_true
+      test_f.call(result.value).should be_true
     end
   end
 end
 
 describe Token do
+  tokens = TokenStream.from_string("testing")
+
   describe "#parse" do
     it "fails if the input is empty" do
       p = Token(Char).new('t')
 
-      expect_raises(ParserException) { p.parse([] of Char) }
+      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
     end
 
     it "fails if the token is not the expected token" do
-      tokens = "testing".chars
       p = Token(Char).new('#')
 
       expect_raises(ParserException) { p.parse(tokens) }
     end
 
     it "succeeds if the token is the expected token" do
-      tokens = "testing".chars
       expected_char = tokens[0]
       p = Token(Char).new(expected_char)
       result = p.parse(tokens)
@@ -218,17 +271,17 @@ describe Map do
       id = ->(x : Char) { x }
       p = Map.new(AnyToken(Char).new, id)
 
-      expect_raises(ParserException) { p.parse([] of Char) }
+      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
     end
 
     it "changes the result value via the provided proc" do
       is_letter = ->(x : Char) { x.letter? }
       p = Map.new(AnyToken(Char).new, is_letter)
-      result = p.parse("testing".chars)
+      result = p.parse(TokenStream.from_string("testing"))
 
       result.value.should be_true
 
-      result = p.parse("_testing".chars)
+      result = p.parse(TokenStream.from_string("_testing"))
       result.value.should be_false
     end
   end
--
cgit v1.2.1
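
Not part of the patch: the sketch below is a rough usage example of the refactored API, inferred only from the specs in this diff (TokenStream.from_string, AnyToken, #assert, #map, #parse, #parse?). Whether the combinators behave exactly like this outside the test suite is an assumption.

require "../src/parcom.cr" # same require path the spec uses

include Parcom

# Input is now wrapped in a TokenStream instead of a raw Char array.
tokens = TokenStream.from_string("testing")

# AnyToken consumes one token; #assert filters the value; #map transforms it.
letter  = AnyToken(Char).new.assert(->(x : Char) { x.letter? })
upcased = letter.map(->(x : Char) { x.upcase })

result = upcased.parse(tokens)
result.value  # expected 'T' (assumption: Assert and Map pass the consumed token through)
result.tokens # the remaining TokenStream

# #parse? returns nil instead of raising ParserException on failure.
upcased.parse?(TokenStream.new([] of Char)) # => nil

The point of the refactor, as the diff shows, is that every #parse and #parse? call site now takes a TokenStream rather than a bare Array(Char), and the specs hoist shared parser setup out of the individual examples.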