forked from pool/python-parsimonious
- Add replace-nose.patch which replaces use of nose with the
  standard library (gh#erikrose/parsimonious#160)
- Switch from using nose runner to pytest

OBS-URL: https://build.opensuse.org/package/show/devel:languages:python/python-parsimonious?expand=0&rev=3
@@ -1,3 +1,10 @@
+-------------------------------------------------------------------
+Mon Apr 20 15:58:03 UTC 2020 - Matej Cepl <mcepl@suse.com>
+
+- Add replace-nose.patch which replaces use of nose with the
+  standard library (gh#erikrose/parsimonious#160)
+- Switch from using nose runner to pytest
+
 -------------------------------------------------------------------
 Mon Dec 23 05:13:58 UTC 2019 - Martin Herkt <9+suse@cirno.systems>
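For orientation, the patch introduced below is largely a mechanical translation of nose helpers into their unittest equivalents (eq_ -> assertEqual, ok_ -> assertTrue, assert_raises -> assertRaises), with module-level test functions folded into unittest.TestCase classes so the suite runs without nose installed. An illustrative sketch of the mapping, not code from the package:

import unittest

class MappingExample(unittest.TestCase):
    def test_equivalents(self):
        # nose.tools.eq_(a, b)                 -> self.assertEqual(a, b)
        self.assertEqual(2 + 2, 4)
        # nose.tools.ok_(cond, msg)            -> self.assertTrue(cond, msg)
        self.assertTrue(3 < 5, '3 should be less than 5')
        # nose.tools.assert_raises(Exc, f, *a) -> self.assertRaises(Exc, f, *a)
        self.assertRaises(ZeroDivisionError, lambda: 1 / 0)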
@@ -1,7 +1,7 @@
 #
 # spec file for package python-parsimonious
 #
-# Copyright (c) 2019 SUSE LLC
+# Copyright (c) 2020 SUSE LLC
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -25,7 +25,11 @@ License: MIT
 Group: Development/Languages/Python
 URL: https://github.com/erikrose/parsimonious
 Source: https://files.pythonhosted.org/packages/source/p/parsimonious/parsimonious-%{version}.tar.gz
-BuildRequires: %{python_module nose}
+# PATCH-FIX-UPSTREAM replace-nose.patch gh#erikrose/parsimonious#160 mcepl@suse.com
+# replace use of nose with the standard library
+Patch0: replace-nose.patch
+
+BuildRequires: %{python_module pytest}
 BuildRequires: %{python_module setuptools}
 BuildRequires: %{python_module six >= 1.9.0}
 BuildRequires: fdupes
@@ -41,6 +45,7 @@ means you feed it a simplified sort of EBNF notation.

 %prep
 %setup -q -n parsimonious-%{version}
+%autopatch -p1

 %build
 %python_build
@@ -50,7 +55,7 @@ means you feed it a simplified sort of EBNF notation.
 %python_expand %fdupes %{buildroot}%{$python_sitelib}

 %check
-%python_exec setup.py nosetests
+%pytest

 %files %{python_files}
 %doc README.rst
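The %check change means the test suite must now run with pytest and the standard library alone, with no nose on the build system. A rough local equivalent of that check (a sketch only; the %pytest macro itself handles per-flavor invocation and install paths), assuming parsimonious is importable:

import unittest

# Discover and run the package's tests using only the standard library.
suite = unittest.defaultTestLoader.discover('parsimonious/tests', pattern='test_*.py')
unittest.TextTestRunner(verbosity=2).run(suite)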
replace-nose.patch (new file, 979 lines)
@@ -0,0 +1,979 @@
--- a/parsimonious/tests/test_benchmarks.py
+++ b/parsimonious/tests/test_benchmarks.py
@@ -1,46 +1,44 @@
"""Tests to show that the benchmarks we based our speed optimizations on are
still valid"""

+import unittest
from functools import partial
from timeit import timeit

-from nose.tools import ok_
-
-
timeit = partial(timeit, number=500000)

-
-def test_lists_vs_dicts():
-    """See what's faster at int key lookup: dicts or lists."""
-    list_time = timeit('item = l[9000]', 'l = [0] * 10000')
-    dict_time = timeit('item = d[9000]', 'd = {x: 0 for x in range(10000)}')
-
-    # Dicts take about 1.6x as long as lists in Python 2.6 and 2.7.
-    ok_(list_time < dict_time, '%s < %s' % (list_time, dict_time))
-
-
-def test_call_vs_inline():
-    """How bad is the calling penalty?"""
-    no_call = timeit('l[0] += 1', 'l = [0]')
-    call = timeit('add(); l[0] += 1', 'l = [0]\n'
-                                      'def add():\n'
-                                      '    pass')
-
-    # Calling a function is pretty fast; it takes just 1.2x as long as the
-    # global var access and addition in l[0] += 1.
-    ok_(no_call < call, '%s (no call) < %s (call)' % (no_call, call))
-
-
-def test_startswith_vs_regex():
-    """Can I beat the speed of regexes by special-casing literals?"""
-    re_time = timeit(
-        'r.match(t, 19)',
-        'import re\n'
-        "r = re.compile('hello')\n"
-        "t = 'this is the finest hello ever'")
-    startswith_time = timeit("t.startswith('hello', 19)",
-                             "t = 'this is the finest hello ever'")
-
-    # Regexes take 2.24x as long as simple string matching.
-    ok_(startswith_time < re_time,
-        '%s (startswith) < %s (re)' % (startswith_time, re_time))
+class TestBenchmarks(unittest.TestCase):
+    def test_lists_vs_dicts(self):
+        """See what's faster at int key lookup: dicts or lists."""
+        list_time = timeit('item = l[9000]', 'l = [0] * 10000')
+        dict_time = timeit('item = d[9000]', 'd = {x: 0 for x in range(10000)}')
+
+        # Dicts take about 1.6x as long as lists in Python 2.6 and 2.7.
+        self.assertTrue(list_time < dict_time, '%s < %s' % (list_time, dict_time))
+
+
+    def test_call_vs_inline(self):
+        """How bad is the calling penalty?"""
+        no_call = timeit('l[0] += 1', 'l = [0]')
+        call = timeit('add(); l[0] += 1', 'l = [0]\n'
+                                          'def add():\n'
+                                          '    pass')
+
+        # Calling a function is pretty fast; it takes just 1.2x as long as the
+        # global var access and addition in l[0] += 1.
+        self.assertTrue(no_call < call, '%s (no call) < %s (call)' % (no_call, call))
+
+
+    def test_startswith_vs_regex(self):
+        """Can I beat the speed of regexes by special-casing literals?"""
+        re_time = timeit(
+            'r.match(t, 19)',
+            'import re\n'
+            "r = re.compile('hello')\n"
+            "t = 'this is the finest hello ever'")
+        startswith_time = timeit("t.startswith('hello', 19)",
+                                 "t = 'this is the finest hello ever'")
+
+        # Regexes take 2.24x as long as simple string matching.
+        self.assertTrue(startswith_time < re_time,
+            '%s (startswith) < %s (re)' % (startswith_time, re_time))
--- a/parsimonious/tests/test_expressions.py
|
||||
+++ b/parsimonious/tests/test_expressions.py
|
||||
@@ -1,7 +1,6 @@
|
||||
#coding=utf-8
|
||||
from unittest import TestCase
|
||||
|
||||
-from nose.tools import eq_, ok_, assert_raises
|
||||
from six import text_type
|
||||
|
||||
from parsimonious.exceptions import ParseError, IncompleteParseError
|
||||
@@ -11,17 +10,6 @@ from parsimonious.grammar import Grammar
|
||||
from parsimonious.nodes import Node
|
||||
|
||||
|
||||
-def len_eq(node, length):
|
||||
- """Return whether the match lengths of 2 nodes are equal.
|
||||
-
|
||||
- Makes tests shorter and lets them omit positional stuff they don't care
|
||||
- about.
|
||||
-
|
||||
- """
|
||||
- node_length = None if node is None else node.end - node.start
|
||||
- return node_length == length
|
||||
-
|
||||
-
|
||||
class LengthTests(TestCase):
|
||||
"""Tests for returning the right lengths
|
||||
|
||||
@@ -29,47 +17,58 @@ class LengthTests(TestCase):
|
||||
partially redundant with TreeTests.
|
||||
|
||||
"""
|
||||
+ def len_eq(self, node, length):
|
||||
+ """Return whether the match lengths of 2 nodes are equal.
|
||||
+
|
||||
+ Makes tests shorter and lets them omit positional stuff they don't care
|
||||
+ about.
|
||||
+
|
||||
+ """
|
||||
+ node_length = None if node is None else node.end - node.start
|
||||
+ self.assertTrue(node_length == length)
|
||||
+
|
||||
+
|
||||
def test_regex(self):
|
||||
- len_eq(Literal('hello').match('ehello', 1), 5) # simple
|
||||
- len_eq(Regex('hello*').match('hellooo'), 7) # *
|
||||
- assert_raises(ParseError, Regex('hello*').match, 'goodbye') # no match
|
||||
- len_eq(Regex('hello', ignore_case=True).match('HELLO'), 5)
|
||||
+ self.len_eq(Literal('hello').match('ehello', 1), 5) # simple
|
||||
+ self.len_eq(Regex('hello*').match('hellooo'), 7) # *
|
||||
+ self.assertRaises(ParseError, Regex('hello*').match, 'goodbye') # no match
|
||||
+ self.len_eq(Regex('hello', ignore_case=True).match('HELLO'), 5)
|
||||
|
||||
def test_sequence(self):
|
||||
- len_eq(Sequence(Regex('hi*'), Literal('lo'), Regex('.ingo')).match('hiiiilobingo1234'),
|
||||
+ self.len_eq(Sequence(Regex('hi*'), Literal('lo'), Regex('.ingo')).match('hiiiilobingo1234'),
|
||||
12) # succeed
|
||||
- assert_raises(ParseError, Sequence(Regex('hi*'), Literal('lo'), Regex('.ingo')).match, 'hiiiilobing') # don't
|
||||
- len_eq(Sequence(Regex('hi*')).match('>hiiii', 1),
|
||||
+ self.assertRaises(ParseError, Sequence(Regex('hi*'), Literal('lo'), Regex('.ingo')).match, 'hiiiilobing') # don't
|
||||
+ self.len_eq(Sequence(Regex('hi*')).match('>hiiii', 1),
|
||||
5) # non-0 pos
|
||||
|
||||
def test_one_of(self):
|
||||
- len_eq(OneOf(Literal('aaa'), Literal('bb')).match('aaa'), 3) # first alternative
|
||||
- len_eq(OneOf(Literal('aaa'), Literal('bb')).match('bbaaa'), 2) # second
|
||||
- assert_raises(ParseError, OneOf(Literal('aaa'), Literal('bb')).match, 'aa') # no match
|
||||
+ self.len_eq(OneOf(Literal('aaa'), Literal('bb')).match('aaa'), 3) # first alternative
|
||||
+ self.len_eq(OneOf(Literal('aaa'), Literal('bb')).match('bbaaa'), 2) # second
|
||||
+ self.assertRaises(ParseError, OneOf(Literal('aaa'), Literal('bb')).match, 'aa') # no match
|
||||
|
||||
def test_not(self):
|
||||
- len_eq(Not(Regex('.')).match(''), 0) # match
|
||||
- assert_raises(ParseError, Not(Regex('.')).match, 'Hi') # don't
|
||||
+ self.len_eq(Not(Regex('.')).match(''), 0) # match
|
||||
+ self.assertRaises(ParseError, Not(Regex('.')).match, 'Hi') # don't
|
||||
|
||||
def test_optional(self):
|
||||
- len_eq(Sequence(Optional(Literal('a')), Literal('b')).match('b'), 1) # contained expr fails
|
||||
- len_eq(Sequence(Optional(Literal('a')), Literal('b')).match('ab'), 2) # contained expr succeeds
|
||||
+ self.len_eq(Sequence(Optional(Literal('a')), Literal('b')).match('b'), 1) # contained expr fails
|
||||
+ self.len_eq(Sequence(Optional(Literal('a')), Literal('b')).match('ab'), 2) # contained expr succeeds
|
||||
|
||||
def test_zero_or_more(self):
|
||||
- len_eq(ZeroOrMore(Literal('b')).match(''), 0) # zero
|
||||
- len_eq(ZeroOrMore(Literal('b')).match('bbb'), 3) # more
|
||||
+ self.len_eq(ZeroOrMore(Literal('b')).match(''), 0) # zero
|
||||
+ self.len_eq(ZeroOrMore(Literal('b')).match('bbb'), 3) # more
|
||||
|
||||
- len_eq(Regex('^').match(''), 0) # Validate the next test.
|
||||
+ self.len_eq(Regex('^').match(''), 0) # Validate the next test.
|
||||
|
||||
# Try to make it loop infinitely using a zero-length contained expression:
|
||||
- len_eq(ZeroOrMore(Regex('^')).match(''), 0)
|
||||
+ self.len_eq(ZeroOrMore(Regex('^')).match(''), 0)
|
||||
|
||||
def test_one_or_more(self):
|
||||
- len_eq(OneOrMore(Literal('b')).match('b'), 1) # one
|
||||
- len_eq(OneOrMore(Literal('b')).match('bbb'), 3) # more
|
||||
- len_eq(OneOrMore(Literal('b'), min=3).match('bbb'), 3) # with custom min; success
|
||||
- assert_raises(ParseError, OneOrMore(Literal('b'), min=3).match, 'bb') # with custom min; failure
|
||||
- len_eq(OneOrMore(Regex('^')).match('bb'), 0) # attempt infinite loop
|
||||
+ self.len_eq(OneOrMore(Literal('b')).match('b'), 1) # one
|
||||
+ self.len_eq(OneOrMore(Literal('b')).match('bbb'), 3) # more
|
||||
+ self.len_eq(OneOrMore(Literal('b'), min=3).match('bbb'), 3) # with custom min; success
|
||||
+ self.assertRaises(ParseError, OneOrMore(Literal('b'), min=3).match, 'bb') # with custom min; failure
|
||||
+ self.len_eq(OneOrMore(Regex('^')).match('bb'), 0) # attempt infinite loop
|
||||
|
||||
|
||||
class TreeTests(TestCase):
|
||||
@@ -82,14 +81,14 @@ class TreeTests(TestCase):
|
||||
def test_simple_node(self):
|
||||
"""Test that leaf expressions like ``Literal`` make the right nodes."""
|
||||
h = Literal('hello', name='greeting')
|
||||
- eq_(h.match('hello'), Node(h, 'hello', 0, 5))
|
||||
+ self.assertEqual(h.match('hello'), Node(h, 'hello', 0, 5))
|
||||
|
||||
def test_sequence_nodes(self):
|
||||
"""Assert that ``Sequence`` produces nodes with the right children."""
|
||||
s = Sequence(Literal('heigh', name='greeting1'),
|
||||
Literal('ho', name='greeting2'), name='dwarf')
|
||||
text = 'heighho'
|
||||
- eq_(s.match(text), Node(s, text, 0, 7, children=
|
||||
+ self.assertEqual(s.match(text), Node(s, text, 0, 7, children=
|
||||
[Node(s.members[0], text, 0, 5),
|
||||
Node(s.members[1], text, 5, 7)]))
|
||||
|
||||
@@ -97,7 +96,7 @@ class TreeTests(TestCase):
|
||||
"""``OneOf`` should return its own node, wrapping the child that succeeds."""
|
||||
o = OneOf(Literal('a', name='lit'), name='one_of')
|
||||
text = 'aa'
|
||||
- eq_(o.match(text), Node(o, text, 0, 1, children=[
|
||||
+ self.assertEqual(o.match(text), Node(o, text, 0, 1, children=[
|
||||
Node(o.members[0], text, 0, 1)]))
|
||||
|
||||
def test_optional(self):
|
||||
@@ -105,25 +104,25 @@ class TreeTests(TestCase):
|
||||
expr = Optional(Literal('a', name='lit'), name='opt')
|
||||
|
||||
text = 'a'
|
||||
- eq_(expr.match(text), Node(expr, text, 0, 1, children=[
|
||||
+ self.assertEqual(expr.match(text), Node(expr, text, 0, 1, children=[
|
||||
Node(expr.members[0], text, 0, 1)]))
|
||||
|
||||
# Test failure of the Literal inside the Optional; the
|
||||
# LengthTests.test_optional is ambiguous for that.
|
||||
text = ''
|
||||
- eq_(expr.match(text), Node(expr, text, 0, 0))
|
||||
+ self.assertEqual(expr.match(text), Node(expr, text, 0, 0))
|
||||
|
||||
def test_zero_or_more_zero(self):
|
||||
"""Test the 0 case of ``ZeroOrMore``; it should still return a node."""
|
||||
expr = ZeroOrMore(Literal('a'), name='zero')
|
||||
text = ''
|
||||
- eq_(expr.match(text), Node(expr, text, 0, 0))
|
||||
+ self.assertEqual(expr.match(text), Node(expr, text, 0, 0))
|
||||
|
||||
def test_one_or_more_one(self):
|
||||
"""Test the 1 case of ``OneOrMore``; it should return a node with a child."""
|
||||
expr = OneOrMore(Literal('a', name='lit'), name='one')
|
||||
text = 'a'
|
||||
- eq_(expr.match(text), Node(expr, text, 0, 1, children=[
|
||||
+ self.assertEqual(expr.match(text), Node(expr, text, 0, 1, children=[
|
||||
Node(expr.members[0], text, 0, 1)]))
|
||||
|
||||
# Things added since Grammar got implemented are covered in integration
|
||||
@@ -142,7 +141,7 @@ class ParseTests(TestCase):
|
||||
"""
|
||||
expr = OneOrMore(Literal('a', name='lit'), name='more')
|
||||
text = 'aa'
|
||||
- eq_(expr.parse(text), Node(expr, text, 0, 2, children=[
|
||||
+ self.assertEqual(expr.parse(text), Node(expr, text, 0, 2, children=[
|
||||
Node(expr.members[0], text, 0, 1),
|
||||
Node(expr.members[0], text, 1, 2)]))
|
||||
|
||||
@@ -168,10 +167,10 @@ class ErrorReportingTests(TestCase):
|
||||
try:
|
||||
grammar.parse(text)
|
||||
except ParseError as error:
|
||||
- eq_(error.pos, 6)
|
||||
- eq_(error.expr, grammar['close_parens'])
|
||||
- eq_(error.text, text)
|
||||
- eq_(text_type(error), "Rule 'close_parens' didn't match at '!!' (line 1, column 7).")
|
||||
+ self.assertEqual(error.pos, 6)
|
||||
+ self.assertEqual(error.expr, grammar['close_parens'])
|
||||
+ self.assertEqual(error.text, text)
|
||||
+ self.assertEqual(text_type(error), "Rule 'close_parens' didn't match at '!!' (line 1, column 7).")
|
||||
|
||||
def test_rewinding(self):
|
||||
"""Make sure rewinding the stack and trying an alternative (which
|
||||
@@ -195,9 +194,9 @@ class ErrorReportingTests(TestCase):
|
||||
try:
|
||||
grammar.parse(text)
|
||||
except ParseError as error:
|
||||
- eq_(error.pos, 8)
|
||||
- eq_(error.expr, grammar['bork'])
|
||||
- eq_(error.text, text)
|
||||
+ self.assertEqual(error.pos, 8)
|
||||
+ self.assertEqual(error.expr, grammar['bork'])
|
||||
+ self.assertEqual(error.text, text)
|
||||
|
||||
def test_no_named_rule_succeeding(self):
|
||||
"""Make sure ParseErrors have sane printable representations even if we
|
||||
@@ -206,9 +205,9 @@ class ErrorReportingTests(TestCase):
|
||||
try:
|
||||
grammar.parse('snork')
|
||||
except ParseError as error:
|
||||
- eq_(error.pos, 0)
|
||||
- eq_(error.expr, grammar['bork'])
|
||||
- eq_(error.text, 'snork')
|
||||
+ self.assertEqual(error.pos, 0)
|
||||
+ self.assertEqual(error.expr, grammar['bork'])
|
||||
+ self.assertEqual(error.text, 'snork')
|
||||
|
||||
def test_parse_with_leftovers(self):
|
||||
"""Make sure ``parse()`` reports where we started failing to match,
|
||||
@@ -217,7 +216,7 @@ class ErrorReportingTests(TestCase):
|
||||
try:
|
||||
grammar.parse('chitty bangbang')
|
||||
except IncompleteParseError as error:
|
||||
- eq_(text_type(error), u"Rule 'sequence' matched in its entirety, but it didn't consume all the text. The non-matching portion of the text begins with 'bang' (line 1, column 12).")
|
||||
+ self.assertEqual(text_type(error), u"Rule 'sequence' matched in its entirety, but it didn't consume all the text. The non-matching portion of the text begins with 'bang' (line 1, column 12).")
|
||||
|
||||
def test_favoring_named_rules(self):
|
||||
"""Named rules should be used in error messages in favor of anonymous
|
||||
@@ -227,7 +226,7 @@ class ErrorReportingTests(TestCase):
|
||||
try:
|
||||
grammar.parse('burp')
|
||||
except ParseError as error:
|
||||
- eq_(text_type(error), u"Rule 'starts_with_a' didn't match at 'burp' (line 1, column 1).")
|
||||
+ self.assertEqual(text_type(error), u"Rule 'starts_with_a' didn't match at 'burp' (line 1, column 1).")
|
||||
|
||||
def test_line_and_column(self):
|
||||
"""Make sure we got the line and column computation right."""
|
||||
@@ -241,7 +240,7 @@ class ErrorReportingTests(TestCase):
|
||||
except ParseError as error:
|
||||
# TODO: Right now, this says "Rule <Literal "\n" at 0x4368250432>
|
||||
# didn't match". That's not the greatest. Fix that, then fix this.
|
||||
- ok_(text_type(error).endswith(r"""didn't match at 'GOO' (line 2, column 4)."""))
|
||||
+ self.assertTrue(text_type(error).endswith(r"""didn't match at 'GOO' (line 2, column 4)."""))
|
||||
|
||||
|
||||
class RepresentationTests(TestCase):
|
||||
@@ -267,19 +266,19 @@ class RepresentationTests(TestCase):
|
||||
|
||||
"""
|
||||
# ZeroOrMore
|
||||
- eq_(text_type(Grammar('foo = "bar" ("baz" "eggs")* "spam"')),
|
||||
+ self.assertEqual(text_type(Grammar('foo = "bar" ("baz" "eggs")* "spam"')),
|
||||
u'foo = "bar" ("baz" "eggs")* "spam"')
|
||||
|
||||
# OneOf
|
||||
- eq_(text_type(Grammar('foo = "bar" ("baz" / "eggs") "spam"')),
|
||||
+ self.assertEqual(text_type(Grammar('foo = "bar" ("baz" / "eggs") "spam"')),
|
||||
u'foo = "bar" ("baz" / "eggs") "spam"')
|
||||
|
||||
# Lookahead
|
||||
- eq_(text_type(Grammar('foo = "bar" &("baz" "eggs") "spam"')),
|
||||
+ self.assertEqual(text_type(Grammar('foo = "bar" &("baz" "eggs") "spam"')),
|
||||
u'foo = "bar" &("baz" "eggs") "spam"')
|
||||
|
||||
# Multiple sequences
|
||||
- eq_(text_type(Grammar('foo = ("bar" "baz") / ("baff" "bam")')),
|
||||
+ self.assertEqual(text_type(Grammar('foo = ("bar" "baz") / ("baff" "bam")')),
|
||||
u'foo = ("bar" "baz") / ("baff" "bam")')
|
||||
|
||||
def test_unicode_surrounding_parens(self):
|
||||
@@ -288,7 +287,7 @@ class RepresentationTests(TestCase):
|
||||
right-hand side of an expression (as they're unnecessary).
|
||||
|
||||
"""
|
||||
- eq_(text_type(Grammar('foo = ("foo" ("bar" "baz"))')),
|
||||
+ self.assertEqual(text_type(Grammar('foo = ("foo" ("bar" "baz"))')),
|
||||
u'foo = "foo" ("bar" "baz")')
|
||||
|
||||
|
||||
@@ -315,5 +314,5 @@ class SlotsTests(TestCase):
|
||||
self.smoo = 'smoo'
|
||||
|
||||
smoo = Smoo()
|
||||
- eq_(smoo.__dict__, {}) # has a __dict__ but with no smoo in it
|
||||
- eq_(smoo.smoo, 'smoo') # The smoo attr ended up in a slot.
|
||||
+ self.assertEqual(smoo.__dict__, {}) # has a __dict__ but with no smoo in it
|
||||
+ self.assertEqual(smoo.smoo, 'smoo') # The smoo attr ended up in a slot.
|
||||
--- a/parsimonious/tests/test_grammar.py
|
||||
+++ b/parsimonious/tests/test_grammar.py
|
||||
@@ -1,11 +1,9 @@
|
||||
# coding=utf-8
|
||||
|
||||
from sys import version_info
|
||||
-from unittest import TestCase
|
||||
+from unittest import TestCase, SkipTest
|
||||
|
||||
import sys
|
||||
-from nose import SkipTest
|
||||
-from nose.tools import eq_, assert_raises, ok_
|
||||
from six import text_type
|
||||
|
||||
from parsimonious.exceptions import UndefinedLabel, ParseError
|
||||
@@ -22,33 +20,33 @@ class BootstrappingGrammarTests(TestCase
|
||||
def test_quantifier(self):
|
||||
text = '*'
|
||||
quantifier = rule_grammar['quantifier']
|
||||
- eq_(quantifier.parse(text),
|
||||
+ self.assertEqual(quantifier.parse(text),
|
||||
Node(quantifier, text, 0, 1, children=[
|
||||
Node(quantifier.members[0], text, 0, 1), Node(rule_grammar['_'], text, 1, 1)]))
|
||||
text = '?'
|
||||
- eq_(quantifier.parse(text),
|
||||
+ self.assertEqual(quantifier.parse(text),
|
||||
Node(quantifier, text, 0, 1, children=[
|
||||
Node(quantifier.members[0], text, 0, 1), Node(rule_grammar['_'], text, 1, 1)]))
|
||||
text = '+'
|
||||
- eq_(quantifier.parse(text),
|
||||
+ self.assertEqual(quantifier.parse(text),
|
||||
Node(quantifier, text, 0, 1, children=[
|
||||
Node(quantifier.members[0], text, 0, 1), Node(rule_grammar['_'], text, 1, 1)]))
|
||||
|
||||
def test_spaceless_literal(self):
|
||||
text = '"anything but quotes#$*&^"'
|
||||
spaceless_literal = rule_grammar['spaceless_literal']
|
||||
- eq_(spaceless_literal.parse(text),
|
||||
+ self.assertEqual(spaceless_literal.parse(text),
|
||||
Node(spaceless_literal, text, 0, len(text), children=[
|
||||
Node(spaceless_literal.members[0], text, 0, len(text))]))
|
||||
text = r'''r"\""'''
|
||||
- eq_(spaceless_literal.parse(text),
|
||||
+ self.assertEqual(spaceless_literal.parse(text),
|
||||
Node(spaceless_literal, text, 0, 5, children=[
|
||||
Node(spaceless_literal.members[0], text, 0, 5)]))
|
||||
|
||||
def test_regex(self):
|
||||
text = '~"[a-zA-Z_][a-zA-Z_0-9]*"LI'
|
||||
regex = rule_grammar['regex']
|
||||
- eq_(rule_grammar['regex'].parse(text),
|
||||
+ self.assertEqual(rule_grammar['regex'].parse(text),
|
||||
Node(regex, text, 0, len(text), children=[
|
||||
Node(Literal('~'), text, 0, 1),
|
||||
Node(rule_grammar['spaceless_literal'], text, 1, 25, children=[
|
||||
@@ -58,40 +56,40 @@ class BootstrappingGrammarTests(TestCase
|
||||
|
||||
def test_successes(self):
|
||||
"""Make sure the PEG recognition grammar succeeds on various inputs."""
|
||||
- ok_(rule_grammar['label'].parse('_'))
|
||||
- ok_(rule_grammar['label'].parse('jeff'))
|
||||
- ok_(rule_grammar['label'].parse('_THIS_THING'))
|
||||
+ self.assertTrue(rule_grammar['label'].parse('_'))
|
||||
+ self.assertTrue(rule_grammar['label'].parse('jeff'))
|
||||
+ self.assertTrue(rule_grammar['label'].parse('_THIS_THING'))
|
||||
|
||||
- ok_(rule_grammar['atom'].parse('some_label'))
|
||||
- ok_(rule_grammar['atom'].parse('"some literal"'))
|
||||
- ok_(rule_grammar['atom'].parse('~"some regex"i'))
|
||||
+ self.assertTrue(rule_grammar['atom'].parse('some_label'))
|
||||
+ self.assertTrue(rule_grammar['atom'].parse('"some literal"'))
|
||||
+ self.assertTrue(rule_grammar['atom'].parse('~"some regex"i'))
|
||||
|
||||
- ok_(rule_grammar['quantified'].parse('~"some regex"i*'))
|
||||
- ok_(rule_grammar['quantified'].parse('thing+'))
|
||||
- ok_(rule_grammar['quantified'].parse('"hi"?'))
|
||||
+ self.assertTrue(rule_grammar['quantified'].parse('~"some regex"i*'))
|
||||
+ self.assertTrue(rule_grammar['quantified'].parse('thing+'))
|
||||
+ self.assertTrue(rule_grammar['quantified'].parse('"hi"?'))
|
||||
|
||||
- ok_(rule_grammar['term'].parse('this'))
|
||||
- ok_(rule_grammar['term'].parse('that+'))
|
||||
+ self.assertTrue(rule_grammar['term'].parse('this'))
|
||||
+ self.assertTrue(rule_grammar['term'].parse('that+'))
|
||||
|
||||
- ok_(rule_grammar['sequence'].parse('this that? other'))
|
||||
+ self.assertTrue(rule_grammar['sequence'].parse('this that? other'))
|
||||
|
||||
- ok_(rule_grammar['ored'].parse('this / that+ / "other"'))
|
||||
+ self.assertTrue(rule_grammar['ored'].parse('this / that+ / "other"'))
|
||||
|
||||
# + is higher precedence than &, so 'anded' should match the whole
|
||||
# thing:
|
||||
- ok_(rule_grammar['lookahead_term'].parse('&this+'))
|
||||
+ self.assertTrue(rule_grammar['lookahead_term'].parse('&this+'))
|
||||
|
||||
- ok_(rule_grammar['expression'].parse('this'))
|
||||
- ok_(rule_grammar['expression'].parse('this? that other*'))
|
||||
- ok_(rule_grammar['expression'].parse('&this / that+ / "other"'))
|
||||
- ok_(rule_grammar['expression'].parse('this / that? / "other"+'))
|
||||
- ok_(rule_grammar['expression'].parse('this? that other*'))
|
||||
-
|
||||
- ok_(rule_grammar['rule'].parse('this = that\r'))
|
||||
- ok_(rule_grammar['rule'].parse('this = the? that other* \t\r'))
|
||||
- ok_(rule_grammar['rule'].parse('the=~"hi*"\n'))
|
||||
+ self.assertTrue(rule_grammar['expression'].parse('this'))
|
||||
+ self.assertTrue(rule_grammar['expression'].parse('this? that other*'))
|
||||
+ self.assertTrue(rule_grammar['expression'].parse('&this / that+ / "other"'))
|
||||
+ self.assertTrue(rule_grammar['expression'].parse('this / that? / "other"+'))
|
||||
+ self.assertTrue(rule_grammar['expression'].parse('this? that other*'))
|
||||
+
|
||||
+ self.assertTrue(rule_grammar['rule'].parse('this = that\r'))
|
||||
+ self.assertTrue(rule_grammar['rule'].parse('this = the? that other* \t\r'))
|
||||
+ self.assertTrue(rule_grammar['rule'].parse('the=~"hi*"\n'))
|
||||
|
||||
- ok_(rule_grammar.parse('''
|
||||
+ self.assertTrue(rule_grammar.parse('''
|
||||
this = the? that other*
|
||||
that = "thing"
|
||||
the=~"hi*"
|
||||
@@ -120,12 +118,12 @@ class RuleVisitorTests(TestCase):
|
||||
rules, default_rule = RuleVisitor().visit(tree)
|
||||
|
||||
text = '98'
|
||||
- eq_(default_rule.parse(text), Node(default_rule, text, 0, 2))
|
||||
+ self.assertEqual(default_rule.parse(text), Node(default_rule, text, 0, 2))
|
||||
|
||||
def test_undefined_rule(self):
|
||||
"""Make sure we throw the right exception on undefined rules."""
|
||||
tree = rule_grammar.parse('boy = howdy\n')
|
||||
- assert_raises(UndefinedLabel, RuleVisitor().visit, tree)
|
||||
+ self.assertRaises(UndefinedLabel, RuleVisitor().visit, tree)
|
||||
|
||||
def test_optional(self):
|
||||
tree = rule_grammar.parse('boy = "howdy"?\n')
|
||||
@@ -135,7 +133,7 @@ class RuleVisitorTests(TestCase):
|
||||
|
||||
# It should turn into a Node from the Optional and another from the
|
||||
# Literal within.
|
||||
- eq_(default_rule.parse(howdy), Node(default_rule, howdy, 0, 5, children=[
|
||||
+ self.assertEqual(default_rule.parse(howdy), Node(default_rule, howdy, 0, 5, children=[
|
||||
Node(Literal("howdy"), howdy, 0, 5)]))
|
||||
|
||||
|
||||
@@ -153,7 +151,7 @@ class GrammarTests(TestCase):
|
||||
"""
|
||||
greeting_grammar = Grammar('greeting = "hi" / "howdy"')
|
||||
tree = greeting_grammar.parse('hi')
|
||||
- eq_(tree, Node(greeting_grammar['greeting'], 'hi', 0, 2, children=[
|
||||
+ self.assertEqual(tree, Node(greeting_grammar['greeting'], 'hi', 0, 2, children=[
|
||||
Node(Literal('hi'), 'hi', 0, 2)]))
|
||||
|
||||
def test_unicode(self):
|
||||
@@ -166,12 +164,12 @@ class GrammarTests(TestCase):
|
||||
bold_close = "))"
|
||||
""")
|
||||
lines = text_type(grammar).splitlines()
|
||||
- eq_(lines[0], 'bold_text = bold_open text bold_close')
|
||||
- ok_('text = ~"[A-Z 0-9]*"i%s' % ('u' if version_info >= (3,) else '')
|
||||
+ self.assertEqual(lines[0], 'bold_text = bold_open text bold_close')
|
||||
+ self.assertTrue('text = ~"[A-Z 0-9]*"i%s' % ('u' if version_info >= (3,) else '')
|
||||
in lines)
|
||||
- ok_('bold_open = "(("' in lines)
|
||||
- ok_('bold_close = "))"' in lines)
|
||||
- eq_(len(lines), 4)
|
||||
+ self.assertTrue('bold_open = "(("' in lines)
|
||||
+ self.assertTrue('bold_close = "))"' in lines)
|
||||
+ self.assertEqual(len(lines), 4)
|
||||
|
||||
def test_match(self):
|
||||
"""Make sure partial-matching (with pos) works."""
|
||||
@@ -182,14 +180,14 @@ class GrammarTests(TestCase):
|
||||
bold_close = "))"
|
||||
""")
|
||||
s = ' ((boo))yah'
|
||||
- eq_(grammar.match(s, pos=1), Node(grammar['bold_text'], s, 1, 8, children=[
|
||||
+ self.assertEqual(grammar.match(s, pos=1), Node(grammar['bold_text'], s, 1, 8, children=[
|
||||
Node(grammar['bold_open'], s, 1, 3),
|
||||
Node(grammar['text'], s, 3, 6),
|
||||
Node(grammar['bold_close'], s, 6, 8)]))
|
||||
|
||||
def test_bad_grammar(self):
|
||||
"""Constructing a Grammar with bad rules should raise ParseError."""
|
||||
- assert_raises(ParseError, Grammar, 'just a bunch of junk')
|
||||
+ self.assertRaises(ParseError, Grammar, 'just a bunch of junk')
|
||||
|
||||
def test_comments(self):
|
||||
"""Test tolerance of comments and blank lines in and around rules."""
|
||||
@@ -204,7 +202,7 @@ class GrammarTests(TestCase):
|
||||
# Pretty good
|
||||
#Oh yeah.#""") # Make sure a comment doesn't need a
|
||||
# \n or \r to end.
|
||||
- eq_(list(sorted(str(grammar).splitlines())),
|
||||
+ self.assertEqual(list(sorted(str(grammar).splitlines())),
|
||||
['''bold_text = stars text stars''',
|
||||
# TODO: Unicode flag is on by default in Python 3. I wonder if we
|
||||
# should turn it on all the time in Parsimonious.
|
||||
@@ -222,30 +220,30 @@ class GrammarTests(TestCase):
|
||||
text = ~"[A-Z 0-9]*"i
|
||||
bold_open = "((" bold_close = "))"
|
||||
""")
|
||||
- ok_(grammar.parse('((booyah))') is not None)
|
||||
+ self.assertTrue(grammar.parse('((booyah))') is not None)
|
||||
|
||||
def test_not(self):
|
||||
"""Make sure "not" predicates get parsed and work properly."""
|
||||
grammar = Grammar(r'''not_arp = !"arp" ~"[a-z]+"''')
|
||||
- assert_raises(ParseError, grammar.parse, 'arp')
|
||||
- ok_(grammar.parse('argle') is not None)
|
||||
+ self.assertRaises(ParseError, grammar.parse, 'arp')
|
||||
+ self.assertTrue(grammar.parse('argle') is not None)
|
||||
|
||||
def test_lookahead(self):
|
||||
grammar = Grammar(r'''starts_with_a = &"a" ~"[a-z]+"''')
|
||||
- assert_raises(ParseError, grammar.parse, 'burp')
|
||||
+ self.assertRaises(ParseError, grammar.parse, 'burp')
|
||||
|
||||
s = 'arp'
|
||||
- eq_(grammar.parse('arp'), Node(grammar['starts_with_a'], s, 0, 3, children=[
|
||||
+ self.assertEqual(grammar.parse('arp'), Node(grammar['starts_with_a'], s, 0, 3, children=[
|
||||
Node(Lookahead(Literal('a')), s, 0, 0),
|
||||
Node(Regex(r'[a-z]+'), s, 0, 3)]))
|
||||
|
||||
def test_parens(self):
|
||||
grammar = Grammar(r'''sequence = "chitty" (" " "bang")+''')
|
||||
# Make sure it's not as if the parens aren't there:
|
||||
- assert_raises(ParseError, grammar.parse, 'chitty bangbang')
|
||||
+ self.assertRaises(ParseError, grammar.parse, 'chitty bangbang')
|
||||
|
||||
s = 'chitty bang bang'
|
||||
- eq_(str(grammar.parse(s)),
|
||||
+ self.assertEqual(str(grammar.parse(s)),
|
||||
"""<Node called "sequence" matching "chitty bang bang">
|
||||
<Node matching "chitty">
|
||||
<Node matching " bang bang">
|
||||
@@ -287,13 +285,13 @@ class GrammarTests(TestCase):
|
||||
digits = digit digits?
|
||||
digit = ~r"[0-9]"
|
||||
""")
|
||||
- ok_(grammar.parse('12') is not None)
|
||||
+ self.assertTrue(grammar.parse('12') is not None)
|
||||
|
||||
def test_badly_circular(self):
|
||||
"""Uselessly circular references should be detected by the grammar
|
||||
compiler."""
|
||||
raise SkipTest('We have yet to make the grammar compiler detect these.')
|
||||
- grammar = Grammar("""
|
||||
+ Grammar("""
|
||||
foo = bar
|
||||
bar = foo
|
||||
""")
|
||||
@@ -315,7 +313,7 @@ class GrammarTests(TestCase):
|
||||
digit=lambda text, pos:
|
||||
(pos + 1) if text[pos].isdigit() else None)
|
||||
s = '[6]'
|
||||
- eq_(grammar.parse(s),
|
||||
+ self.assertEqual(grammar.parse(s),
|
||||
Node(grammar['bracketed_digit'], s, 0, 3, children=[
|
||||
Node(grammar['start'], s, 0, 1),
|
||||
Node(grammar['digit'], s, 1, 2),
|
||||
@@ -339,7 +337,7 @@ class GrammarTests(TestCase):
|
||||
digit=lambda text, pos, cache, error, grammar:
|
||||
grammar['real_digit'].match_core(text, pos, cache, error))
|
||||
s = '[6]'
|
||||
- eq_(grammar.parse(s),
|
||||
+ self.assertEqual(grammar.parse(s),
|
||||
Node(grammar['bracketed_digit'], s, 0, 3, children=[
|
||||
Node(grammar['start'], s, 0, 1),
|
||||
Node(grammar['real_digit'], s, 1, 2),
|
||||
@@ -360,7 +358,7 @@ class GrammarTests(TestCase):
|
||||
LazyReference('five'),
|
||||
name='forty_five')).default('forty_five')
|
||||
s = '45'
|
||||
- eq_(grammar.parse(s),
|
||||
+ self.assertEqual(grammar.parse(s),
|
||||
Node(grammar['forty_five'], s, 0, 2, children=[
|
||||
Node(grammar['four'], s, 0, 1),
|
||||
Node(grammar['five'], s, 1, 2)]))
|
||||
@@ -375,7 +373,7 @@ class GrammarTests(TestCase):
|
||||
"""
|
||||
grammar = Grammar(one_char=lambda text, pos: pos + 1).default('one_char')
|
||||
s = '4'
|
||||
- eq_(grammar.parse(s),
|
||||
+ self.assertEqual(grammar.parse(s),
|
||||
Node(grammar['one_char'], s, 0, 1))
|
||||
|
||||
def test_lazy_default_rule(self):
|
||||
@@ -388,7 +386,7 @@ class GrammarTests(TestCase):
|
||||
styled_text = text
|
||||
text = "hi"
|
||||
""")
|
||||
- eq_(grammar.parse('hi'), Node(grammar['text'], 'hi', 0, 2))
|
||||
+ self.assertEqual(grammar.parse('hi'), Node(grammar['text'], 'hi', 0, 2))
|
||||
|
||||
def test_immutable_grammar(self):
|
||||
"""Make sure that a Grammar is immutable after being created."""
|
||||
@@ -398,14 +396,14 @@ class GrammarTests(TestCase):
|
||||
|
||||
def mod_grammar(grammar):
|
||||
grammar['foo'] = 1
|
||||
- assert_raises(TypeError, mod_grammar, [grammar])
|
||||
+ self.assertRaises(TypeError, mod_grammar, [grammar])
|
||||
|
||||
def mod_grammar(grammar):
|
||||
new_grammar = Grammar(r"""
|
||||
baz = 'biff'
|
||||
""")
|
||||
grammar.update(new_grammar)
|
||||
- assert_raises(AttributeError, mod_grammar, [grammar])
|
||||
+ self.assertRaises(AttributeError, mod_grammar, [grammar])
|
||||
|
||||
def test_repr(self):
|
||||
self.assertTrue(repr(Grammar(r'foo = "a"')))
|
||||
@@ -433,7 +431,7 @@ class TokenGrammarTests(TestCase):
|
||||
foo = token1 "token2"
|
||||
token1 = "token1"
|
||||
""")
|
||||
- eq_(grammar.parse(s),
|
||||
+ self.assertEqual(grammar.parse(s),
|
||||
Node(grammar['foo'], s, 0, 2, children=[
|
||||
Node(grammar['token1'], s, 0, 1),
|
||||
Node(TokenMatcher('token2'), s, 1, 2)]))
|
||||
@@ -443,7 +441,7 @@ class TokenGrammarTests(TestCase):
|
||||
grammar = TokenGrammar("""
|
||||
foo = "token1" "token2"
|
||||
""")
|
||||
- assert_raises(ParseError,
|
||||
+ self.assertRaises(ParseError,
|
||||
grammar.parse,
|
||||
[Token('tokenBOO'), Token('token2')])
|
||||
|
||||
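Besides the assertion renames, test_grammar.py (above) switches from nose.SkipTest to unittest.SkipTest; raising it marks a test as skipped under both the unittest and pytest runners. A minimal illustration, not taken from the package:

import unittest

class SkipExample(unittest.TestCase):
    def test_not_ready(self):
        # Both runners report this as a skip rather than a failure.
        raise unittest.SkipTest('feature not implemented yet')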
--- a/parsimonious/tests/test_nodes.py
|
||||
+++ b/parsimonious/tests/test_nodes.py
|
||||
@@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
-from nose import SkipTest
|
||||
-from nose.tools import eq_, ok_, assert_raises, assert_in
|
||||
+from unittest import SkipTest, TestCase
|
||||
|
||||
from parsimonious import Grammar, NodeVisitor, VisitationError, rule
|
||||
from parsimonious.expressions import Literal
|
||||
@@ -33,61 +32,62 @@ class ExplosiveFormatter(NodeVisitor):
|
||||
raise ValueError
|
||||
|
||||
|
||||
-def test_visitor():
|
||||
- """Assert a tree gets visited correctly."""
|
||||
- grammar = Grammar(r'''
|
||||
- bold_text = bold_open text bold_close
|
||||
- text = ~'[a-zA-Z 0-9]*'
|
||||
- bold_open = '(('
|
||||
- bold_close = '))'
|
||||
- ''')
|
||||
- text = '((o hai))'
|
||||
- tree = Node(grammar['bold_text'], text, 0, 9,
|
||||
- [Node(grammar['bold_open'], text, 0, 2),
|
||||
- Node(grammar['text'], text, 2, 7),
|
||||
- Node(grammar['bold_close'], text, 7, 9)])
|
||||
- eq_(grammar.parse(text), tree)
|
||||
- result = HtmlFormatter().visit(tree)
|
||||
- eq_(result, '<b>o hai</b>')
|
||||
-
|
||||
-
|
||||
-def test_visitation_exception():
|
||||
- assert_raises(VisitationError,
|
||||
- ExplosiveFormatter().visit,
|
||||
- Node(Literal(''), '', 0, 0))
|
||||
-
|
||||
-
|
||||
-def test_str():
|
||||
- """Test str and unicode of ``Node``."""
|
||||
- n = Node(Literal('something', name='text'), 'o hai', 0, 5)
|
||||
- good = '<Node called "text" matching "o hai">'
|
||||
- eq_(str(n), good)
|
||||
-
|
||||
-
|
||||
-def test_repr():
|
||||
- """Test repr of ``Node``."""
|
||||
- s = u'hai ö'
|
||||
- boogie = u'böogie'
|
||||
- n = Node(Literal(boogie), s, 0, 3, children=[
|
||||
- Node(Literal(' '), s, 3, 4), Node(Literal(u'ö'), s, 4, 5)])
|
||||
- eq_(repr(n),
|
||||
- str("""s = {hai_o}\nNode({boogie}, s, 0, 3, children=[Node({space}, s, 3, 4), Node({o}, s, 4, 5)])""").format(
|
||||
- hai_o=repr(s),
|
||||
- boogie=repr(Literal(boogie)),
|
||||
- space=repr(Literal(" ")),
|
||||
- o=repr(Literal(u"ö")),
|
||||
+class SimpleTests(TestCase):
|
||||
+ def test_visitor(self):
|
||||
+ """Assert a tree gets visited correctly."""
|
||||
+ grammar = Grammar(r'''
|
||||
+ bold_text = bold_open text bold_close
|
||||
+ text = ~'[a-zA-Z 0-9]*'
|
||||
+ bold_open = '(('
|
||||
+ bold_close = '))'
|
||||
+ ''')
|
||||
+ text = '((o hai))'
|
||||
+ tree = Node(grammar['bold_text'], text, 0, 9,
|
||||
+ [Node(grammar['bold_open'], text, 0, 2),
|
||||
+ Node(grammar['text'], text, 2, 7),
|
||||
+ Node(grammar['bold_close'], text, 7, 9)])
|
||||
+ self.assertEqual(grammar.parse(text), tree)
|
||||
+ result = HtmlFormatter().visit(tree)
|
||||
+ self.assertEqual(result, '<b>o hai</b>')
|
||||
+
|
||||
+
|
||||
+ def test_visitation_exception(self):
|
||||
+ self.assertRaises(VisitationError,
|
||||
+ ExplosiveFormatter().visit,
|
||||
+ Node(Literal(''), '', 0, 0))
|
||||
+
|
||||
+
|
||||
+ def test_str(self):
|
||||
+ """Test str and unicode of ``Node``."""
|
||||
+ n = Node(Literal('something', name='text'), 'o hai', 0, 5)
|
||||
+ good = '<Node called "text" matching "o hai">'
|
||||
+ self.assertEqual(str(n), good)
|
||||
+
|
||||
+
|
||||
+ def test_repr(self):
|
||||
+ """Test repr of ``Node``."""
|
||||
+ s = u'hai ö'
|
||||
+ boogie = u'böogie'
|
||||
+ n = Node(Literal(boogie), s, 0, 3, children=[
|
||||
+ Node(Literal(' '), s, 3, 4), Node(Literal(u'ö'), s, 4, 5)])
|
||||
+ self.assertEqual(repr(n),
|
||||
+ str("""s = {hai_o}\nNode({boogie}, s, 0, 3, children=[Node({space}, s, 3, 4), Node({o}, s, 4, 5)])""").format(
|
||||
+ hai_o=repr(s),
|
||||
+ boogie=repr(Literal(boogie)),
|
||||
+ space=repr(Literal(" ")),
|
||||
+ o=repr(Literal(u"ö")),
|
||||
+ )
|
||||
)
|
||||
- )
|
||||
|
||||
|
||||
-def test_parse_shortcut():
|
||||
- """Exercise the simple case in which the visitor takes care of parsing."""
|
||||
- eq_(HtmlFormatter().parse('(('), '<b>')
|
||||
+ def test_parse_shortcut(self):
|
||||
+ """Exercise the simple case in which the visitor takes care of parsing."""
|
||||
+ self.assertEqual(HtmlFormatter().parse('(('), '<b>')
|
||||
|
||||
|
||||
-def test_match_shortcut():
|
||||
- """Exercise the simple case in which the visitor takes care of matching."""
|
||||
- eq_(HtmlFormatter().match('((other things'), '<b>')
|
||||
+ def test_match_shortcut(self):
|
||||
+ """Exercise the simple case in which the visitor takes care of matching."""
|
||||
+ self.assertEqual(HtmlFormatter().match('((other things'), '<b>')
|
||||
|
||||
|
||||
class CoupledFormatter(NodeVisitor):
|
||||
@@ -108,82 +108,84 @@ class CoupledFormatter(NodeVisitor):
|
||||
"""Return the text verbatim."""
|
||||
return node.text
|
||||
|
||||
+class DecoratorTests(TestCase):
|
||||
+ def test_rule_decorator(self):
|
||||
+ """Make sure the @rule decorator works."""
|
||||
+ self.assertEqual(CoupledFormatter().parse('((hi))'), '<b>hi</b>')
|
||||
+
|
||||
+
|
||||
+ def test_rule_decorator_subclassing(self):
|
||||
+ """Make sure we can subclass and override visitor methods without blowing
|
||||
+ away the rules attached to them."""
|
||||
+ class OverridingFormatter(CoupledFormatter):
|
||||
+ def visit_text(self, node, visited_children):
|
||||
+ """Return the text capitalized."""
|
||||
+ return node.text.upper()
|
||||
+
|
||||
+ @rule('"not used"')
|
||||
+ def visit_useless(self, node, visited_children):
|
||||
+ """Get in the way. Tempt the metaclass to pave over the
|
||||
+ superclass's grammar with a new one."""
|
||||
|
||||
-def test_rule_decorator():
|
||||
- """Make sure the @rule decorator works."""
|
||||
- eq_(CoupledFormatter().parse('((hi))'), '<b>hi</b>')
|
||||
-
|
||||
-
|
||||
-def test_rule_decorator_subclassing():
|
||||
- """Make sure we can subclass and override visitor methods without blowing
|
||||
- away the rules attached to them."""
|
||||
- class OverridingFormatter(CoupledFormatter):
|
||||
- def visit_text(self, node, visited_children):
|
||||
- """Return the text capitalized."""
|
||||
- return node.text.upper()
|
||||
-
|
||||
- @rule('"not used"')
|
||||
- def visit_useless(self, node, visited_children):
|
||||
- """Get in the way. Tempt the metaclass to pave over the
|
||||
- superclass's grammar with a new one."""
|
||||
-
|
||||
- raise SkipTest("I haven't got around to making this work yet.")
|
||||
- eq_(OverridingFormatter().parse('((hi))'), '<b>HI</b>')
|
||||
+ raise SkipTest("I haven't got around to making this work yet.")
|
||||
+ self.assertEqual(OverridingFormatter().parse('((hi))'), '<b>HI</b>')
|
||||
|
||||
|
||||
class PrimalScream(Exception):
|
||||
pass
|
||||
|
||||
|
||||
-def test_unwrapped_exceptions():
|
||||
- class Screamer(NodeVisitor):
|
||||
- grammar = Grammar("""greeting = 'howdy'""")
|
||||
- unwrapped_exceptions = (PrimalScream,)
|
||||
-
|
||||
- def visit_greeting(self, thing, visited_children):
|
||||
- raise PrimalScream('This should percolate up!')
|
||||
-
|
||||
- assert_raises(PrimalScream, Screamer().parse, 'howdy')
|
||||
-
|
||||
-
|
||||
-def test_node_inequality():
|
||||
- node = Node(Literal('12345'), 'o hai', 0, 5)
|
||||
- ok_(node != 5)
|
||||
- ok_(node != None)
|
||||
- ok_(node != Node(Literal('23456'), 'o hai', 0, 5))
|
||||
- ok_(not (node != Node(Literal('12345'), 'o hai', 0, 5)))
|
||||
-
|
||||
-
|
||||
-def test_generic_visit_NotImplementedError_unnamed_node():
|
||||
- """
|
||||
- Test that generic_visit provides informative error messages
|
||||
- when visitors are not defined.
|
||||
-
|
||||
- Regression test for https://github.com/erikrose/parsimonious/issues/110
|
||||
- """
|
||||
- class MyVisitor(NodeVisitor):
|
||||
- grammar = Grammar(r'''
|
||||
- bar = "b" "a" "r"
|
||||
- ''')
|
||||
- unwrapped_exceptions = (NotImplementedError, )
|
||||
-
|
||||
- with assert_raises(NotImplementedError) as e:
|
||||
- MyVisitor().parse('bar')
|
||||
- assert_in('No visitor method was defined for this expression: "b"', str(e.exception))
|
||||
-
|
||||
-
|
||||
-def test_generic_visit_NotImplementedError_named_node():
|
||||
- """
|
||||
- Test that generic_visit provides informative error messages
|
||||
- when visitors are not defined.
|
||||
- """
|
||||
- class MyVisitor(NodeVisitor):
|
||||
- grammar = Grammar(r'''
|
||||
- bar = myrule myrule myrule
|
||||
- myrule = ~"[bar]"
|
||||
- ''')
|
||||
- unwrapped_exceptions = (NotImplementedError, )
|
||||
+class SpecialCasesTests(TestCase):
|
||||
|
||||
- with assert_raises(NotImplementedError) as e:
|
||||
- MyVisitor().parse('bar')
|
||||
- assert_in('No visitor method was defined for this expression: myrule = ~"[bar]"', str(e.exception))
|
||||
+ def test_unwrapped_exceptions(self):
|
||||
+ class Screamer(NodeVisitor):
|
||||
+ grammar = Grammar("""greeting = 'howdy'""")
|
||||
+ unwrapped_exceptions = (PrimalScream,)
|
||||
+
|
||||
+ def visit_greeting(self, thing, visited_children):
|
||||
+ raise PrimalScream('This should percolate up!')
|
||||
+
|
||||
+ self.assertRaises(PrimalScream, Screamer().parse, 'howdy')
|
||||
+
|
||||
+
|
||||
+ def test_node_inequality(self):
|
||||
+ node = Node(Literal('12345'), 'o hai', 0, 5)
|
||||
+ self.assertTrue(node != 5)
|
||||
+ self.assertTrue(node != None)
|
||||
+ self.assertTrue(node != Node(Literal('23456'), 'o hai', 0, 5))
|
||||
+ self.assertTrue(not (node != Node(Literal('12345'), 'o hai', 0, 5)))
|
||||
+
|
||||
+
|
||||
+ def test_generic_visit_NotImplementedError_unnamed_node(self):
|
||||
+ """
|
||||
+ Test that generic_visit provides informative error messages
|
||||
+ when visitors are not defined.
|
||||
+
|
||||
+ Regression test for https://github.com/erikrose/parsimonious/issues/110
|
||||
+ """
|
||||
+ class MyVisitor(NodeVisitor):
|
||||
+ grammar = Grammar(r'''
|
||||
+ bar = "b" "a" "r"
|
||||
+ ''')
|
||||
+ unwrapped_exceptions = (NotImplementedError, )
|
||||
+
|
||||
+ with self.assertRaises(NotImplementedError) as e:
|
||||
+ MyVisitor().parse('bar')
|
||||
+ self.assertIn('No visitor method was defined for this expression: "b"', str(e.exception))
|
||||
+
|
||||
+
|
||||
+ def test_generic_visit_NotImplementedError_named_node(self):
|
||||
+ """
|
||||
+ Test that generic_visit provides informative error messages
|
||||
+ when visitors are not defined.
|
||||
+ """
|
||||
+ class MyVisitor(NodeVisitor):
|
||||
+ grammar = Grammar(r'''
|
||||
+ bar = myrule myrule myrule
|
||||
+ myrule = ~"[bar]"
|
||||
+ ''')
|
||||
+ unwrapped_exceptions = (NotImplementedError, )
|
||||
+
|
||||
+ with self.assertRaises(NotImplementedError) as e:
|
||||
+ MyVisitor().parse('bar')
|
||||
+ self.assertIn('No visitor method was defined for this expression: myrule = ~"[bar]"', str(e.exception))
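test_nodes.py (above) also replaces nose's assert_raises/assert_in pair with unittest's context-manager form, which captures the exception for further inspection. A small self-contained sketch of that pattern (illustrative, not package code):

import unittest

class ContextManagerExample(unittest.TestCase):
    def test_error_message(self):
        # assertRaises as a context manager exposes the exception instance...
        with self.assertRaises(ValueError) as ctx:
            int('not a number')
        # ...so assertIn can check its message, replacing nose.tools.assert_in.
        self.assertIn('invalid literal', str(ctx.exception))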