| author | Matthew Hall <hallmatthew314@gmail.com> | 2023-03-11 00:57:15 +1300 |
|---|---|---|
| committer | Matthew Hall <hallmatthew314@gmail.com> | 2023-03-11 00:57:15 +1300 |
| commit | 3fab598b4873fda69183de21b650d041cc95411c | |
| tree | 0890b5a2eb6dcb213f4fa5d86c5a5ce7eb778f65 | |
| parent | 6f93f16468d83b84ea386fcf74666a89e80bc704 | |
Class renaming
Diffstat (limited to 'spec/parcom_spec.cr')
| -rw-r--r-- | spec/parcom_spec.cr | 172 |
1 file changed, 86 insertions, 86 deletions
```diff
diff --git a/spec/parcom_spec.cr b/spec/parcom_spec.cr
index 9b70c2e..f28c560 100644
--- a/spec/parcom_spec.cr
+++ b/spec/parcom_spec.cr
@@ -4,10 +4,10 @@
 require "../src/parcom.cr"
 include Parcom

-describe TokenStream do
+describe Tokens do
   describe ".from_string" do
-    it "constructs a TokenStream(Char) from a String" do
-      tokens = TokenStream.from_string("abcd")
+    it "constructs a Tokens(Char) from a String" do
+      tokens = Tokens.from_string("abcd")
       tokens.tokens.should eq("abcd".chars)
     end
   end
@@ -15,18 +15,18 @@ describe TokenStream do
   describe "#initialize" do
     it "wraps an array with the contents of the given iterable" do
       set = Set{'a', 'b', 'c', 'd'}
-      tokens = TokenStream.new(set)
+      tokens = Tokens.new(set)
       tokens.tokens.should eq(set.to_a)

       arr = "abcd".chars
-      tokens = TokenStream.new(arr)
+      tokens = Tokens.new(arr)
       tokens.tokens.should eq(arr)
     end
   end

   context do
-    tokens_empty = TokenStream.new([] of Char)
-    tokens = TokenStream.from_string("abcd")
+    tokens_empty = Tokens.new([] of Char)
+    tokens = Tokens.from_string("abcd")

     describe "#[]" do
       it "returns the token at the given index" do
@@ -34,13 +34,13 @@ describe TokenStream do
         expect_raises(IndexError) { tokens_empty[2] }
       end

-      it "returns a new TokenStream similar to Array#[](Int, Int)" do
-        tokens[1, 5].should eq(TokenStream.new(['b', 'c', 'd']))
+      it "returns a new Tokens similar to Array#[](Int, Int)" do
+        tokens[1, 5].should eq(Tokens.new(['b', 'c', 'd']))
         expect_raises(IndexError) { tokens_empty[1, 5] }
       end

-      it "returns a new TokenStream similar to Array#[](Range)" do
-        tokens[1..3].should eq(TokenStream.new(['b', 'c', 'd']))
+      it "returns a new Tokens similar to Array#[](Range)" do
+        tokens[1..3].should eq(Tokens.new(['b', 'c', 'd']))
         expect_raises(IndexError) { tokens_empty[1..3] }
       end
     end
@@ -66,7 +66,7 @@ end

 describe Result do
   describe "#initialize" do
     it "sets values for #tokens and #value" do
-      tokens = TokenStream.from_string("esting")
+      tokens = Tokens.from_string("esting")
       value = 't'
       result = Result(Char, Char).new(tokens, value)
@@ -81,13 +81,13 @@ describe Parser do
   describe "#parse?" do
     it "returns `nil` if the parser fails" do
-      result = p.parse?(TokenStream.new([] of Char))
+      result = p.parse?(Tokens.new([] of Char))

       result.should be_nil
     end

     it "returns a `Result(T, V)` if the parser succeeds" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       result = p.parse(tokens)

       result.should be_a(Result(Char, Char))
@@ -98,9 +98,9 @@ end
 describe Flunk do
   describe "#parse" do
     it "always fails" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")

-      expect_raises(ParserException) { Flunk(Char, Char).new.parse(tokens) }
+      expect_raises(ParserFail) { Flunk(Char, Char).new.parse(tokens) }
     end
   end
 end
@@ -111,7 +111,7 @@ describe AnyToken do
   describe "#parse" do
     it "succeeds when input is non-empty" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       result = p.parse(tokens)

       result.tokens.should eq(tokens[1..])
@@ -119,7 +119,7 @@ describe AnyToken do
     end

     it "fails when input is empty" do
-      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
+      expect_raises(ParserFail) { p.parse(Tokens.new([] of Char)) }
     end
   end
 end
@@ -130,22 +130,22 @@ describe Eof do
   describe "#parse" do
     it "succeeds when input is empty" do
-      result = p.parse(TokenStream.new([] of Char))
+      result = p.parse(Tokens.new([] of Char))

       result.tokens.empty?.should be_true
       result.value.should be_nil
     end

     it "fails when input is non-empty" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end
   end
 end

 describe Peek do
-  tokens = TokenStream.from_string("testing")
+  tokens = Tokens.from_string("testing")
   p = AnyToken(Char).new
   result_normal = p.parse(tokens)
   result_peek = Peek.new(p).parse(tokens)
@@ -167,19 +167,19 @@ describe Assert do
   describe "#parse" do
     it "fails if the wrapped parser fails" do
-      expect_raises(ParserException) do
-        p.parse(TokenStream.new([] of Char))
+      expect_raises(ParserFail) do
+        p.parse(Tokens.new([] of Char))
       end
     end

     it "fails if the result value fails the test" do
-      tokens = TokenStream.from_string("_testing")
+      tokens = Tokens.from_string("_testing")

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if the wrapped parser succeeds and the test passes" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       expected_char = tokens[0]
       result = p.parse(tokens)
@@ -194,17 +194,17 @@ describe Satisfy do
   describe "#parse" do
     it "fails if the input is empty" do
-      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
+      expect_raises(ParserFail) { p.parse(Tokens.new([] of Char)) }
     end

     it "fails if the token fails the test" do
-      tokens = TokenStream.from_string("_testing")
+      tokens = Tokens.from_string("_testing")

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if the token passes the test" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       expected_char = tokens[0]
       result = p.parse(tokens)
@@ -214,19 +214,19 @@ end

 describe Token do
-  tokens = TokenStream.from_string("testing")
+  tokens = Tokens.from_string("testing")

   describe "#parse" do
     it "fails if the input is empty" do
       p = Token(Char).new('t')

-      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
+      expect_raises(ParserFail) { p.parse(Tokens.new([] of Char)) }
     end

     it "fails if the token is not the expected token" do
       p = Token(Char).new('#')

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if the token is the expected token" do
@@ -244,16 +244,16 @@ describe Map do
     it "fails if the wrapped parser fails" do
       p = AnyToken(Char).new.map { |x| x }

-      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
+      expect_raises(ParserFail) { p.parse(Tokens.new([] of Char)) }
     end

     it "changes the result value via the provided proc" do
       p = AnyToken(Char).new.map { |x| x.letter? }

-      result = p.parse(TokenStream.from_string("testing"))
+      result = p.parse(Tokens.from_string("testing"))
       result.value.should be_true

-      result = p.parse(TokenStream.from_string("_testing"))
+      result = p.parse(Tokens.from_string("_testing"))
       result.value.should be_false
     end
   end
@@ -261,24 +261,24 @@ end

 describe Plus do
   describe "#parse" do
-    tokens = TokenStream.from_string("testing")
+    tokens = Tokens.from_string("testing")
     p_t = Token(Char).new('t')
     p_e = Token(Char).new('e')
     p_at = Token(Char).new('@')

     it "fails if the first parser fails" do
       p = p_at + p_e
-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "fails if the second parser fails" do
       p = p_t + p_at
-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "fails if both parsers fail" do
       p = p_at + p_at
-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if both parsers succeed" do
@@ -295,7 +295,7 @@ describe Plus do
       p_fails = p_e + p_t

       p_succeeds.parse(tokens) # should not raise an exception
-      expect_raises(ParserException) { p_fails.parse(tokens) }
+      expect_raises(ParserFail) { p_fails.parse(tokens) }

       p_s = Token(Char).new('s')
@@ -314,7 +314,7 @@ end
 describe Left do
   describe "#parse" do
     it "returns the value of the first parser if both succeed" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       letter_t = Token.new('t')
       letter_e = Token.new('e')
       result = (letter_t << letter_e).parse(tokens)
@@ -329,7 +329,7 @@ end
 describe Right do
   describe "#parse" do
     it "returns the value of the second parser if both succeed" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       letter_t = Token.new('t')
       letter_e = Token.new('e')
       result = (letter_t >> letter_e).parse(tokens)
@@ -345,19 +345,19 @@ describe Phrase do
   describe "#parse" do
     it "fails if the wrapped parser fails" do
-      tokens = TokenStream.from_string("_")
+      tokens = Tokens.from_string("_")

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "fails if not all of the input tokens are parsed" do
-      tokens = TokenStream.from_string("tt")
+      tokens = Tokens.from_string("tt")

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if the wrapped parser successfully parses all of the input" do
-      tokens = TokenStream.from_string("t")
+      tokens = Tokens.from_string("t")
       result = p.parse(tokens)

       result.tokens.empty?.should be_true
@@ -371,7 +371,7 @@ describe Recover do
   describe "#parse" do
     it "succeeds and returns the wrapped parser's value if it succeeds" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       result = p.parse(tokens)

       result.tokens.should eq(tokens[1..])
@@ -379,7 +379,7 @@ describe Recover do
     end

     it "succeeds and returns the default value without modifying the input if the wrapped parser fails" do
-      tokens = TokenStream.from_string("_____")
+      tokens = Tokens.from_string("_____")
       result = p.parse(tokens)

       result.tokens.should eq(tokens)
@@ -393,7 +393,7 @@ describe Optional do
   describe "#parse" do
     it "succeeds and returns the wrapped parser's value if it succeeds" do
-      tokens = TokenStream.from_string("testing")
+      tokens = Tokens.from_string("testing")
       result = p.parse(tokens)

       result.tokens.should eq(tokens[1..])
@@ -401,7 +401,7 @@ describe Optional do
     end

     it "succeeds and returns a value of `nil` without modifying the input if the wrapped parser fails" do
-      tokens = TokenStream.from_string("_____")
+      tokens = Tokens.from_string("_____")
       result = p.parse(tokens)

       result.tokens.should eq(tokens)
@@ -419,7 +419,7 @@ describe Sequence do
   describe "#parse" do
     it "runs each wrapped parser in order, returns each result" do
-      tokens = TokenStream.from_string("abcd")
+      tokens = Tokens.from_string("abcd")
       result = p.parse(tokens)

       result.value.should eq("abcd".chars)
@@ -429,13 +429,13 @@ describe Sequence do
     it "fails if any of the wrapped parsers fail" do
       fail_strings = ["", "abed", "bbcd", "abce"]
       fail_strings.each do |s|
-        tokens = TokenStream.from_string(s)
-        expect_raises(ParserException) { p.parse(tokens) }
+        tokens = Tokens.from_string(s)
+        expect_raises(ParserFail) { p.parse(tokens) }
       end
     end

     it "succeeds and returns empty array if parser iterable is empty" do
-      tokens = TokenStream.from_string("abcd")
+      tokens = Tokens.from_string("abcd")
       empty_p = Sequence.new([] of Parser(Char, Char))
       result = empty_p.parse(tokens)
@@ -445,22 +445,22 @@ end

-describe Tokens do
-  p = Tokens.new("test".chars)
+describe TokenSeq do
+  p = TokenSeq.new("test".chars)

   describe "#parse" do
     it "fails if the input stream is too short" do
-      input = TokenStream.from_string("")
-      expect_raises(ParserException) { p.parse(input) }
+      input = Tokens.from_string("")
+      expect_raises(ParserFail) { p.parse(input) }
     end

     it "fails if it encounters an unexpected token" do
-      input = TokenStream.from_string("text")
-      expect_raises(ParserException) { p.parse(input) }
+      input = Tokens.from_string("text")
+      expect_raises(ParserFail) { p.parse(input) }
     end

     it "succeeds if the input starts with the expected tokens" do
-      input = TokenStream.from_string("testing")
+      input = Tokens.from_string("testing")
       result = p.parse(input)

       result.tokens.should eq(input[4..])
@@ -474,7 +474,7 @@ describe Many do
   describe "#parse" do
     it "returns an empty array if the wrapped parser never succeeds" do
-      tokens = TokenStream.from_string("bb")
+      tokens = Tokens.from_string("bb")
       result = p.parse(tokens)

       result.value.empty?.should be_true
@@ -482,13 +482,13 @@ describe Many do
     end

     it "stops parsing when the wrapped parser fails, returns all successes" do
-      tokens = TokenStream.from_string("aaabcd")
+      tokens = Tokens.from_string("aaabcd")
       result = p.parse(tokens)

       result.value.should eq("aaa".chars)
       result.tokens.should eq(tokens[3..])

-      tokens = TokenStream.from_string("aaa")
+      tokens = Tokens.from_string("aaa")
       result = p.parse(tokens)

       result.value.should eq("aaa".chars)
@@ -501,18 +501,18 @@ describe Some do
   p = Some.new(Token.new('a'))
   describe "#parse" do
     it "fails if the wrapped parser never succeeds" do
-      tokens = TokenStream.from_string("")
-      expect_raises(ParserException) { p.parse(tokens) }
+      tokens = Tokens.from_string("")
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "stops parsing when the wrapped parser fails, returns all successes" do
-      tokens = TokenStream.from_string("aaabcd")
+      tokens = Tokens.from_string("aaabcd")
       result = p.parse(tokens)

       result.value.should eq("aaa".chars)
       result.tokens.should eq(tokens[3..])

-      tokens = TokenStream.from_string("aaa")
+      tokens = Tokens.from_string("aaa")
       result = p.parse(tokens)

       result.value.should eq("aaa".chars)
@@ -523,7 +523,7 @@ end

 describe Exactly do
   letter_a = Token.new('a')
-  tokens = TokenStream.from_string("aaabcd")
+  tokens = Tokens.from_string("aaabcd")

   describe "#parse" do
     it "tries to parse exactly n of the wrapper parser" do
@@ -558,20 +558,20 @@ describe Exactly do
     it "fails if there are not enough matching tokens" do
       p = Exactly.new(60, letter_a)

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end
   end
 end

 describe AtLeast do
   letter_a = Token.new('a')
-  tokens = TokenStream.from_string("aaaabcd")
+  tokens = Tokens.from_string("aaaabcd")

   describe "#parse" do
     it "fails if there are not enough matching tokens to parse" do
       p = AtLeast.new(5, letter_a)
-      expect_raises(ParserException) { p.parse(tokens) }
-      #expect_raises(ParserException) { raise ParserException.new("sdgseg") }
+      expect_raises(ParserFail) { p.parse(tokens) }
+      #expect_raises(ParserFail) { raise ParserFail.new("sdgseg") }
     end

     it "parses n or more times with the given parser" do
@@ -594,7 +594,7 @@ end

 describe AtMost do
   letter_a = Token.new('a')
-  tokens = TokenStream.from_string("aaaabcd")
+  tokens = Tokens.from_string("aaaabcd")

   describe "#parse" do
     it "does not parse more than n times" do
@@ -619,7 +619,7 @@ end

 describe Between do
   letter_a = Token.new('a')
-  tokens = TokenStream.from_string("aaaabcd")
+  tokens = Tokens.from_string("aaaabcd")

   describe "#parse" do
     it "parses at least i times, up to a limit of j times" do
@@ -632,13 +632,13 @@ describe Between do
     it "fails if there are not enough parser successes" do
       p = Between.new(5, 6, letter_a)

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end
   end
 end

 describe FirstOf do
-  tokens = TokenStream.from_string("abcd")
+  tokens = Tokens.from_string("abcd")
   letter_a = Token.new('a')
   f = Flunk(Char, Char).new
@@ -666,7 +666,7 @@ describe FirstOf do
       y = Token.new('x')
       z = Token.new('x')
       p = FirstOf.new([x, y, z] of Parser(Char, Char))
-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end
   end
 end
@@ -675,16 +675,16 @@ describe SepBy do
   describe "#parse" do
     letter_a = Token.new('a')
     comma = Token.new(',')
-    tokens = TokenStream.from_string("a,a,a,a")
+    tokens = Tokens.from_string("a,a,a,a")

     it "fails if no elements can be parsed" do
       p = SepBy(Char, Char, Char).new(comma, comma)

-      expect_raises(ParserException) { p.parse(tokens) }
+      expect_raises(ParserFail) { p.parse(tokens) }
     end

     it "succeeds if only one element can be parsed" do
-      t1 = TokenStream.from_string("a")
-      t2 = TokenStream.from_string("a,")
+      t1 = Tokens.from_string("a")
+      t2 = Tokens.from_string("a,")
       p = SepBy(Char, Char, Char).new(letter_a, comma)

       result = p.parse(t1)
@@ -706,7 +706,7 @@ describe SepBy do
       # drop last char in tokens, should parse three elements
       result = p.parse(tokens[..5])
       result.value.should eq("aaa".chars)
-      result.tokens.should eq(TokenStream.from_string(","))
+      result.tokens.should eq(Tokens.from_string(","))
     end
   end
 end
```
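For orientation, here is a minimal sketch of how the renamed API reads after this commit. It uses only names exercised by the spec above (`Tokens`, `Token`, `TokenSeq`, `ParserFail`); the `require` path follows the spec's own layout, and the behavior noted in the comments is what the tests imply rather than a documented guarantee:

```crystal
require "../src/parcom.cr"

include Parcom

# TokenStream -> Tokens: the wrapper around the input token array.
tokens = Tokens.from_string("testing")

# Token matches a single expected token; per the spec, the matched
# token becomes the result value and one token of input is consumed.
result = Token(Char).new('t').parse(tokens)
result.value  # => 't'
result.tokens # => tokens[1..]

# The old Tokens parser -> TokenSeq: matches a literal run of tokens.
TokenSeq.new("test".chars).parse(tokens)

# ParserException -> ParserFail: raised by #parse on a failed match.
begin
  Token(Char).new('#').parse(tokens)
rescue ParserFail
  # '#' does not match the first input token, so the parse fails
end
```

Where the exception-based flow is unwanted, the `Parser#parse?` variant tested above returns `nil` on failure instead of raising `ParserFail`.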
