require "./spec_helper" require "../src/parcom.cr" include Parcom describe TokenStream do describe ".from_string" do it "constructs a TokenStream(Char) from a String" do tokens = TokenStream.from_string("abcd") tokens.tokens.should eq("abcd".chars) end end describe "#initialize" do it "wraps an array with the contents of the given iterable" do set = Set{'a', 'b', 'c', 'd'} tokens = TokenStream.new(set) tokens.tokens.should eq(set.to_a) arr = "abcd".chars tokens = TokenStream.new(arr) tokens.tokens.should eq(arr) end end context do tokens_empty = TokenStream.new([] of Char) tokens = TokenStream.from_string("abcd") describe "#[]" do it "returns the token at the given index" do tokens[2].should eq('c') expect_raises(IndexError) { tokens_empty[2] } end it "returns a new TokenStream similar to Array#[](Int, Int)" do tokens[1, 5].should eq(TokenStream.new(['b', 'c', 'd'])) expect_raises(IndexError) { tokens_empty[1, 5] } end it "returns a new TokenStream similar to Array#[](Range)" do tokens[1..3].should eq(TokenStream.new(['b', 'c', 'd'])) expect_raises(IndexError) { tokens_empty[1..3] } end end describe "#[]?" do it "analogous to `Array#[]?`" do # we should only need to check the nil-returning cases tokens_empty[2]?.should be_nil tokens_empty[1, 5]?.should be_nil tokens_empty[1..3]?.should be_nil end end describe "#empty?" do it "exposes the `#empty?` method of the wrapped array" do tokens.empty?.should be_false tokens_empty.empty?.should be_true end end end end describe Result do describe "#initialize" do it "sets values for #tokens and #value" do tokens = TokenStream.from_string("esting") value = 't' result = Result(Char, Char).new(tokens, value) result.tokens.should eq(tokens) result.value.should eq(value) end end end describe Parser do p = AnyToken(Char).new describe "#parse?" 
do it "returns `nil` if the parser fails" do result = p.parse?(TokenStream.new([] of Char)) result.should be_nil end it "returns a `Result(T, V)` if the parser succeeds" do tokens = TokenStream.from_string("testing") result = p.parse(tokens) result.should be_a(Result(Char, Char)) end end end describe Flunk do describe "#parse" do it "always fails" do tokens = TokenStream.from_string("testing") expect_raises(ParserException) { Flunk(Char, Char).new.parse(tokens) } end end end describe AnyToken do context do p = AnyToken(Char).new describe "#parse" do it "succeeds when input is non-empty" do tokens = TokenStream.from_string("testing") result = p.parse(tokens) result.tokens.should eq(tokens[1..]) result.value.should eq('t') end it "fails when input is empty" do expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) } end end end end describe Eof do p = Eof(Char).new describe "#parse" do it "succeeds when input is empty" do result = p.parse(TokenStream.new([] of Char)) result.tokens.empty?.should be_true result.value.should be_nil end it "fails when input is non-empty" do tokens = TokenStream.from_string("testing") expect_raises(ParserException) { p.parse(tokens) } end end end describe Peek do tokens = TokenStream.from_string("testing") p = AnyToken(Char).new result_normal = p.parse(tokens) result_peek = Peek.new(p).parse(tokens) describe "#parse" do it "does not modify the result of the wrapped parser" do result_peek.value.should eq(result_normal.value) end it "does not consume any input" do result_peek.tokens.should eq(tokens) end end end describe Assert do test_f = ->(x : Char) { x == 't' } p = AnyToken(Char).new.assert { |x| x == 't' } describe "#parse" do it "fails if the wrapped parser fails" do expect_raises(ParserException) do p.parse(TokenStream.new([] of Char)) end end it "fails if the result value fails the test" do tokens = TokenStream.from_string("_testing") expect_raises(ParserException) { p.parse(tokens) } end it "succeeds if the wrapped parser succeeds and the test passes" do tokens = TokenStream.from_string("testing") expected_char = tokens[0] result = p.parse(tokens) result.value.should eq(expected_char) test_f.call(expected_char).should be_true end end end describe Satisfy do p = Satisfy(Char).new { |x| x == 't' } describe "#parse" do it "fails if the input is empty" do expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) } end it "fails if the token fails the test" do tokens = TokenStream.from_string("_testing") expect_raises(ParserException) { p.parse(tokens) } end it "succeeds if the token passes the test" do tokens = TokenStream.from_string("testing") expected_char = tokens[0] result = p.parse(tokens) result.value.should eq(expected_char) end end end describe Token do tokens = TokenStream.from_string("testing") describe "#parse" do it "fails if the input is empty" do p = Token(Char).new('t') expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) } end it "fails if the token is not the expected token" do p = Token(Char).new('#') expect_raises(ParserException) { p.parse(tokens) } end it "succeeds if the token is the expected token" do expected_char = tokens[0] p = Token(Char).new(expected_char) result = p.parse(tokens) result.value.should eq(expected_char) end end end describe Map do describe "#parse" do it "fails if the wrapped parser fails" do p = AnyToken(Char).new.map { |x| x } expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) } end it "changes the result value via the provided proc" do p = 
describe Map do
  describe "#parse" do
    it "fails if the wrapped parser fails" do
      p = AnyToken(Char).new.map { |x| x }
      expect_raises(ParserException) { p.parse(TokenStream.new([] of Char)) }
    end

    it "changes the result value via the provided proc" do
      p = AnyToken(Char).new.map { |x| x.letter? }

      result = p.parse(TokenStream.from_string("testing"))
      result.value.should be_true

      result = p.parse(TokenStream.from_string("_testing"))
      result.value.should be_false
    end
  end
end

describe Plus do
  describe "#parse" do
    tokens = TokenStream.from_string("testing")
    p_t = Token(Char).new('t')
    p_e = Token(Char).new('e')
    p_at = Token(Char).new('@')

    it "fails if the first parser fails" do
      p = p_at + p_e
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "fails if the second parser fails" do
      p = p_t + p_at
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "fails if both parsers fail" do
      p = p_at + p_at
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "succeeds if both parsers succeed" do
      p = p_t + p_e
      result = p.parse(tokens)
      result.tokens.should eq(tokens[2..])
      result.value[0].should eq('t')
      result.value[1].should eq('e')
    end

    it "evaluates parsers from left to right (left associative)" do
      p_succeeds = p_t + p_e
      p_fails = p_e + p_t
      p_succeeds.parse(tokens) # should not raise an exception
      expect_raises(ParserException) { p_fails.parse(tokens) }

      p_s = Token(Char).new('s')

      r = (p_t + p_e + p_s).parse(tokens) # should not raise an exception
      r.value.should be_a({ {Char, Char}, Char})

      r = (p_t + (p_e + p_s)).parse(tokens) # should not raise an exception
      r.value.should be_a({Char, {Char, Char} })
    end
  end
end

describe Phrase do
  p = Phrase.new(Token.new('t'))

  describe "#parse" do
    it "fails if the wrapped parser fails" do
      tokens = TokenStream.from_string("_")
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "fails if not all of the input tokens are parsed" do
      tokens = TokenStream.from_string("tt")
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "succeeds if the wrapped parser successfully parses all of the input" do
      tokens = TokenStream.from_string("t")
      result = p.parse(tokens)
      result.tokens.empty?.should be_true
      result.value.should eq('t')
    end
  end
end

describe Recover do
  p = Token.new('t').recover('@')

  describe "#parse" do
    it "succeeds and returns the wrapped parser's value if it succeeds" do
      tokens = TokenStream.from_string("testing")
      result = p.parse(tokens)
      result.tokens.should eq(tokens[1..])
      result.value.should eq('t')
    end

    it "succeeds and returns the default value without modifying the input if the wrapped parser fails" do
      tokens = TokenStream.from_string("_____")
      result = p.parse(tokens)
      result.tokens.should eq(tokens)
      result.value.should eq('@')
    end
  end
end

describe Optional do
  p = Optional.new(Token.new('t'))

  describe "#parse" do
    it "succeeds and returns the wrapped parser's value if it succeeds" do
      tokens = TokenStream.from_string("testing")
      result = p.parse(tokens)
      result.tokens.should eq(tokens[1..])
      result.value.should eq('t')
    end

    it "succeeds and returns a value of `nil` without modifying the input if the wrapped parser fails" do
      tokens = TokenStream.from_string("_____")
      result = p.parse(tokens)
      result.tokens.should eq(tokens)
      result.value.should be_nil
    end
  end
end
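# A hedged sketch combining Recover, Optional, and Plus as specified above;
# result shapes are inferred from the assertions in this file:
#
#   sign = Token.new('-').recover('+')   # '-' if present, otherwise '+' without consuming input
#   x    = Optional.new(Token.new('x'))  # 'x' if present, otherwise nil without consuming input
#   pair = sign + x                      # tuple of both results
#   pair.parse(TokenStream.from_string("-x")).value # => {'-', 'x'}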
fail" do fail_strings = ["", "abed", "bbcd", "abce"] fail_strings.each do |s| tokens = TokenStream.from_string(s) expect_raises(ParserException) { p.parse(tokens) } end end it "succeeds and returns empty array if parser iterable is empty" do tokens = TokenStream.from_string("abcd") empty_p = Sequence.new([] of Parser(Char, Char)) result = empty_p.parse(tokens) result.value.empty?.should be_true result.tokens.should eq(tokens) end end end describe Tokens do p = Tokens.new("test".chars) describe "#parse" do it "fails if the input stream is too short" do input = TokenStream.from_string("") expect_raises(ParserException) { p.parse(input) } end it "fails if it encounters an unexpected token" do input = TokenStream.from_string("text") expect_raises(ParserException) { p.parse(input) } end it "succeeds if the input starts with the expected tokens" do input = TokenStream.from_string("testing") result = p.parse(input) result.tokens.should eq(input[4..]) result.value.should eq("test".chars) end end end describe Many do p = Many.new(Token.new('a')) describe "#parse" do it "returns an empty array if the wrapped parser never succeeds" do tokens = TokenStream.from_string("bb") result = p.parse(tokens) result.value.empty?.should be_true result.tokens.should eq(tokens) end it "stops parsing when the wrapped parser fails, returns all successes" do tokens = TokenStream.from_string("aaabcd") result = p.parse(tokens) result.value.should eq("aaa".chars) result.tokens.should eq(tokens[3..]) tokens = TokenStream.from_string("aaa") result = p.parse(tokens) result.value.should eq("aaa".chars) result.tokens.should eq(tokens[3..]) end end end describe Some do p = Some.new(Token.new('a')) describe "#parse" do it "fails if the wrapped parser never succeeds" do tokens = TokenStream.from_string("") expect_raises(ParserException) { p.parse(tokens) } end it "stops parsing when the wrapped parser fails, returns all successes" do tokens = TokenStream.from_string("aaabcd") result = p.parse(tokens) result.value.should eq("aaa".chars) result.tokens.should eq(tokens[3..]) tokens = TokenStream.from_string("aaa") result = p.parse(tokens) result.value.should eq("aaa".chars) result.tokens.should eq(tokens[3..]) end end end describe Exactly do letter_a = Token.new('a') tokens = TokenStream.from_string("aaabcd") describe "#parse" do it "tries to parse exactly n of the wrapper parser" do p = Exactly.new(3, letter_a) result = p.parse(tokens) result.value.should eq("aaa".chars) result.tokens.should eq(tokens[3..]) end it "always succeeds with an empty array if n < 1" do p = Exactly.new(0, letter_a) result = p.parse(tokens) result.value.empty?.should be_true result.tokens.should eq(tokens) p = Exactly.new(-42, letter_a) result = p.parse(tokens) result.value.empty?.should be_true result.tokens.should eq(tokens) end it "does not take extra matching tokens" do p = Exactly.new(2, letter_a) result = p.parse(tokens) result.value.should eq("aa".chars) result.tokens.should eq(tokens[2..]) end it "fails if there are not enough matching tokens" do p = Exactly.new(60, letter_a) expect_raises(ParserException) { p.parse(tokens) } end end end describe AtLeast do letter_a = Token.new('a') tokens = TokenStream.from_string("aaaabcd") describe "#parse" do it "fails if there are not enough matching tokens to parse" do p = AtLeast.new(5, letter_a) expect_raises(ParserException) { p.parse(tokens) } end it "parses n or more times with the given parser" do p0 = AtLeast.new(0, letter_a) p2 = AtLeast.new(2, letter_a) p4 = AtLeast.new(4, letter_a) result0 = 
describe AtLeast do
  letter_a = Token.new('a')
  tokens = TokenStream.from_string("aaaabcd")

  describe "#parse" do
    it "fails if there are not enough matching tokens to parse" do
      p = AtLeast.new(5, letter_a)
      expect_raises(ParserException) { p.parse(tokens) }
    end

    it "parses n or more times with the given parser" do
      p0 = AtLeast.new(0, letter_a)
      p2 = AtLeast.new(2, letter_a)
      p4 = AtLeast.new(4, letter_a)

      result0 = p0.parse(tokens)
      result2 = p2.parse(tokens)
      result4 = p4.parse(tokens)

      result0.value.should eq("aaaa".chars)
      result0.tokens.should eq(tokens[4..])
      result2.should eq(result0)
      result4.should eq(result0)
    end
  end
end

describe AtMost do
  letter_a = Token.new('a')
  tokens = TokenStream.from_string("aaaabcd")

  describe "#parse" do
    it "does not parse more than n times" do
      p0 = AtMost.new(0, letter_a)
      p2 = AtMost.new(2, letter_a)
      p6 = AtMost.new(6, letter_a)

      r0 = p0.parse(tokens)
      r0.value.empty?.should be_true
      r0.tokens.should eq(tokens)

      r2 = p2.parse(tokens)
      r2.value.should eq("aa".chars)
      r2.tokens.should eq(tokens[2..])

      r6 = p6.parse(tokens)
      r6.value.should eq("aaaa".chars)
      r6.tokens.should eq(tokens[4..])
    end
  end
end

pending Between do
end

pending StopAt do
end

pending StopAfter do
end

pending StopIf do
end

pending FirstOf do
end

pending SepBy do
end