From fdfce3d4c7a672fdff10e91bf5a4808cd4d46c4d Mon Sep 17 00:00:00 2001 From: Matthew Hall Date: Fri, 31 Mar 2023 21:48:41 +1300 Subject: Separate core tests into separate files --- spec/parcom_spec.cr | 150 ---------------------------------------------- spec/parser_chain_spec.cr | 79 ++++++++++++++++++++++++ spec/result_spec.cr | 20 +++++++ spec/tokens_spec.cr | 63 +++++++++++++++++++ 4 files changed, 162 insertions(+), 150 deletions(-) create mode 100644 spec/parser_chain_spec.cr create mode 100644 spec/result_spec.cr create mode 100644 spec/tokens_spec.cr (limited to 'spec') diff --git a/spec/parcom_spec.cr b/spec/parcom_spec.cr index 42188fb..be384e6 100644 --- a/spec/parcom_spec.cr +++ b/spec/parcom_spec.cr @@ -2,156 +2,6 @@ require "./spec_helper" include Parcom -describe "parser_chain" do - it "works with zero intermediate steps" do - p = parser_chain "test", Char, Char, finally: Parser(Char, Char).pure('x') - tokens = Tokens.from_string("abcd") - result = p.parse(tokens) - - result.value.should eq('x') - result.tokens.should eq(tokens) - end - - it "works with one intermediate step" do - p = parser_chain "test", Char, Char, - {c, Parser(Char, Char).any_token}, - finally: Parser(Char, Char).pure(c) - tokens = Tokens.from_string("abcd") - result = p.parse(tokens) - - result.value.should eq('a') - result.tokens.should eq(tokens[1..]) - end - - it "works with many intermediate steps" do - digit = Parser(Char, Char).satisfy(&.number?) - p = parser_chain "float", Char, Float64, - {sign, Parser.token('-').map_const(-1).recover(1)}, - {front, digit.many}, - {point, Parser.token('.').optional}, - {back, digit.many}, - finally: case {front.empty?, point.nil?, back.empty?} - when {false, _, true} - Parser(Char, Float64).pure(front.join.to_f64 * sign) - when {true, false, false} - Parser(Char, Float64).pure("0.#{back.join}".to_f64 * sign) - when {false, false, false} - Parser(Char, Float64).pure("#{front.join}.#{back.join}".to_f64 * sign) - else - Parser(Char, Float64).flunk - end - { - {"1", 1_f64}, - {"-1", -1_f64}, - {"2.", 2_f64}, - {"-2.", -2_f64}, - {".3", 0.3_f64}, - {"-.3", -0.3_f64}, - {"0.4", 0.4_f64}, - {"-0.4", -0.4_f64}, - }.each do |s, v| - tokens = Tokens.from_string(s) - result = p.parse(tokens) - - result.value.should eq(v) - result.tokens.empty?.should be_true - end - end - - it "allows ignoring results with underscores" do - any_word = Parser(Char, String).satisfy(&.letter?).some.map(&.join) - ws = Parser(Char, Array(Char)).satisfy(&.whitespace?).many - two_words = parser_chain "two words", Char, {String, String}, - {word, any_word}, - {_, ws}, - finally: Parser.token_sequence(word.chars).map_const({word, word}) - - tokens = Tokens.from_string("foo \n foo") - result = two_words.parse(tokens) - result.value.should eq({"foo", "foo"}) - - tokens = Tokens.from_string("foo bar") - expect_raises(ParserFail) { two_words.parse(tokens) } - tokens = Tokens.from_string("foofoo") - expect_raises(ParserFail) { two_words.parse(tokens) } - end -end - -describe Tokens do - describe ".from_string" do - it "constructs a Tokens(Char) from a String" do - tokens = Tokens.from_string("abcd") - tokens.tokens.should eq("abcd".chars) - end - end - - describe "#initialize" do - it "wraps an array with the contents of the given iterable" do - set = Set{'a', 'b', 'c', 'd'} - tokens = Tokens.new(set) - tokens.tokens.should eq(set.to_a) - - arr = "abcd".chars - tokens = Tokens.new(arr) - tokens.tokens.should eq(arr) - end - end - - context do - tokens_empty = Tokens.new([] of Char) - tokens = 
Tokens.from_string("abcd") - - describe "#[]" do - it "returns the token at the given index" do - tokens[2].should eq('c') - expect_raises(IndexError) { tokens_empty[2] } - end - - it "returns a new Tokens similar to Array#[](Int, Int)" do - tokens[1, 5].should eq(Tokens.new(['b', 'c', 'd'])) - expect_raises(IndexError) { tokens_empty[1, 5] } - end - - it "returns a new Tokens similar to Array#[](Range)" do - tokens[1..3].should eq(Tokens.new(['b', 'c', 'd'])) - expect_raises(IndexError) { tokens_empty[1..3] } - end - end - - describe "#[]?" do - it "analogous to `Array#[]?`" do - # we should only need to check the nil-returning cases - tokens_empty[2]?.should be_nil - tokens_empty[1, 5]?.should be_nil - tokens_empty[1..3]?.should be_nil - end - end - - describe "#empty?" do - it "exposes the `#empty?` method of the wrapped array" do - tokens.empty?.should be_false - tokens_empty.empty?.should be_true - end - end - end -end - -describe Result do - describe "#map" do - r = Result.new(Tokens.from_string("abcd"), 'x') - r_expected = Result.new(Tokens.from_string("abcd"), 'x'.ord) - - it "accepts a proc" do - f = ->(c : Char) { c.ord } - r.map(f).should eq(r_expected) - end - - it "accepts a block" do - r.map { |c| c.ord }.should eq(r_expected) - end - end -end - describe Parser do describe "self.pure" do v = 'a' diff --git a/spec/parser_chain_spec.cr b/spec/parser_chain_spec.cr new file mode 100644 index 0000000..3f1e3a0 --- /dev/null +++ b/spec/parser_chain_spec.cr @@ -0,0 +1,79 @@ +require "./spec_helper" + +include Parcom + +describe "parser_chain" do + it "works with zero intermediate steps" do + p = parser_chain "test", Char, Char, finally: Parser(Char, Char).pure('x') + tokens = Tokens.from_string("abcd") + result = p.parse(tokens) + + result.value.should eq('x') + result.tokens.should eq(tokens) + end + + it "works with one intermediate step" do + p = parser_chain "test", Char, Char, + {c, Parser(Char, Char).any_token}, + finally: Parser(Char, Char).pure(c) + tokens = Tokens.from_string("abcd") + result = p.parse(tokens) + + result.value.should eq('a') + result.tokens.should eq(tokens[1..]) + end + + it "works with many intermediate steps" do + digit = Parser(Char, Char).satisfy(&.number?) 
+ p = parser_chain "float", Char, Float64, + {sign, Parser.token('-').map_const(-1).recover(1)}, + {front, digit.many}, + {point, Parser.token('.').optional}, + {back, digit.many}, + finally: case {front.empty?, point.nil?, back.empty?} + when {false, _, true} + Parser(Char, Float64).pure(front.join.to_f64 * sign) + when {true, false, false} + Parser(Char, Float64).pure("0.#{back.join}".to_f64 * sign) + when {false, false, false} + Parser(Char, Float64).pure("#{front.join}.#{back.join}".to_f64 * sign) + else + Parser(Char, Float64).flunk + end + { + {"1", 1_f64}, + {"-1", -1_f64}, + {"2.", 2_f64}, + {"-2.", -2_f64}, + {".3", 0.3_f64}, + {"-.3", -0.3_f64}, + {"0.4", 0.4_f64}, + {"-0.4", -0.4_f64}, + }.each do |s, v| + tokens = Tokens.from_string(s) + result = p.parse(tokens) + + result.value.should eq(v) + result.tokens.empty?.should be_true + end + end + + it "allows ignoring results with underscores" do + any_word = Parser(Char, String).satisfy(&.letter?).some.map(&.join) + ws = Parser(Char, Array(Char)).satisfy(&.whitespace?).many + two_words = parser_chain "two words", Char, {String, String}, + {word, any_word}, + {_, ws}, + finally: Parser.token_sequence(word.chars).map_const({word, word}) + + tokens = Tokens.from_string("foo \n foo") + result = two_words.parse(tokens) + result.value.should eq({"foo", "foo"}) + + tokens = Tokens.from_string("foo bar") + expect_raises(ParserFail) { two_words.parse(tokens) } + tokens = Tokens.from_string("foofoo") + expect_raises(ParserFail) { two_words.parse(tokens) } + end +end + diff --git a/spec/result_spec.cr b/spec/result_spec.cr new file mode 100644 index 0000000..094713e --- /dev/null +++ b/spec/result_spec.cr @@ -0,0 +1,20 @@ +require "./spec_helper" + +include Parcom + +describe Result do + describe "#map" do + r = Result.new(Tokens.from_string("abcd"), 'x') + r_expected = Result.new(Tokens.from_string("abcd"), 'x'.ord) + + it "accepts a proc" do + f = ->(c : Char) { c.ord } + r.map(f).should eq(r_expected) + end + + it "accepts a block" do + r.map { |c| c.ord }.should eq(r_expected) + end + end +end + diff --git a/spec/tokens_spec.cr b/spec/tokens_spec.cr new file mode 100644 index 0000000..ab43ce2 --- /dev/null +++ b/spec/tokens_spec.cr @@ -0,0 +1,63 @@ +require "./spec_helper" + +include Parcom + +describe Tokens do + describe ".from_string" do + it "constructs a Tokens(Char) from a String" do + tokens = Tokens.from_string("abcd") + tokens.tokens.should eq("abcd".chars) + end + end + + describe "#initialize" do + it "wraps an array with the contents of the given iterable" do + set = Set{'a', 'b', 'c', 'd'} + tokens = Tokens.new(set) + tokens.tokens.should eq(set.to_a) + + arr = "abcd".chars + tokens = Tokens.new(arr) + tokens.tokens.should eq(arr) + end + end + + context do + tokens_empty = Tokens.new([] of Char) + tokens = Tokens.from_string("abcd") + + describe "#[]" do + it "returns the token at the given index" do + tokens[2].should eq('c') + expect_raises(IndexError) { tokens_empty[2] } + end + + it "returns a new Tokens similar to Array#[](Int, Int)" do + tokens[1, 5].should eq(Tokens.new(['b', 'c', 'd'])) + expect_raises(IndexError) { tokens_empty[1, 5] } + end + + it "returns a new Tokens similar to Array#[](Range)" do + tokens[1..3].should eq(Tokens.new(['b', 'c', 'd'])) + expect_raises(IndexError) { tokens_empty[1..3] } + end + end + + describe "#[]?" 
do + it "analogous to `Array#[]?`" do + # we should only need to check the nil-returning cases + tokens_empty[2]?.should be_nil + tokens_empty[1, 5]?.should be_nil + tokens_empty[1..3]?.should be_nil + end + end + + describe "#empty?" do + it "exposes the `#empty?` method of the wrapped array" do + tokens.empty?.should be_false + tokens_empty.empty?.should be_true + end + end + end +end + -- cgit v1.2.1
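
Each of the new spec files begins with `require "./spec_helper"`. That helper is not included in this patch; a minimal sketch of what it would contain, assuming the conventional Crystal shard layout with the library entry point at src/parcom.cr, is:

    # spec/spec_helper.cr -- assumed contents, not part of this patch
    require "spec"          # Crystal's built-in spec framework
    require "../src/parcom" # the library under test (path assumed from shard conventions)

With that helper in place, the split files run together via `crystal spec`, or individually, e.g. `crystal spec spec/tokens_spec.cr`.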