forked from pool/python-Pyphen

- Switch to github tarball for the test.py so source_validators
  actually works

OBS-URL: https://build.opensuse.org/package/show/devel:languages:python/python-Pyphen?expand=0&rev=8
Authored by Tomáš Chvátal, 2019-02-22 10:54:21 +00:00; committed by Git OBS Bridge
parent f7ce74f86a
commit 66625ce048
5 changed files with 11 additions and 144 deletions
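In effect, test.py now ships inside the upstream source archive instead of being fetched from a separate GitHub blob URL, which source_validators cannot verify. The snippet below is a minimal, hypothetical sanity check (not part of the package or the spec) that the GitHub tag archive unpacks to Pyphen-<version>/ and already carries test.py; the hard-coded 0.9.5 URL and member path are assumptions based on GitHub's archive naming.

# Hypothetical standalone check, not part of the package: verify that the
# GitHub tag archive unpacks to Pyphen-<version>/ and already contains
# test.py, which is what lets the spec drop Source1 and the cp step below.
import tarfile
import urllib.request

URL = "https://github.com/Kozea/Pyphen/archive/0.9.5.tar.gz"

with urllib.request.urlopen(URL) as response:
    # Stream the gzip-compressed tarball without writing it to disk.
    with tarfile.open(fileobj=response, mode="r|gz") as archive:
        names = [member.name for member in archive]

assert "Pyphen-0.9.5/test.py" in names, "test.py missing from the GitHub tarball"
print("test.py ships in the tarball; %setup -q -n Pyphen-%{version} will find it")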

0.9.5.tar.gz Normal file

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f5f13ebca44c00ba78a42716e600c916cddcffab5da541c6c488e0932637ff5
+size 1901384

Pyphen-0.9.5.tar.gz (deleted)

@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3b633a50873156d777e1f1075ba4d8e96a6ad0a3ca42aa3ea9a6259f93f18921
-size 2971389

python-Pyphen.changes

@@ -1,3 +1,9 @@
+-------------------------------------------------------------------
+Fri Feb 22 10:46:08 UTC 2019 - Tomáš Chvátal <tchvatal@suse.com>
+
+- Switch to github tarball for the test.py so source_validators
+  actually works
+
 -------------------------------------------------------------------
 Thu Feb 21 16:16:53 UTC 2019 - Hans-Peter Jansen <hpj@urpla.net>

python-Pyphen.spec

@@ -24,8 +24,7 @@ Summary: Pure Python module to hyphenate text
 License: GPL-2.0-or-later AND LGPL-2.1-or-later AND MPL-1.1
 Group: Development/Languages/Python
 Url: https://github.com/Kozea/Pyphen
-Source: https://files.pythonhosted.org/packages/source/P/Pyphen/Pyphen-%{version}.tar.gz
-Source1: https://github.com/Kozea/Pyphen/blob/master/test.py
+Source: https://github.com/Kozea/Pyphen/archive/%{version}.tar.gz
 BuildRequires: %{python_module pytest}
 BuildRequires: %{python_module setuptools}
 BuildRequires: fdupes
@@ -42,7 +41,6 @@ This module is a fork of python-hyphenator, written by Wilbert Berendsen.
 %prep
 %setup -q -n Pyphen-%{version}
-cp %{S:1} test.py
 
 %build
 %python_build
@@ -52,8 +50,7 @@ cp %{S:1} test.py
 %python_expand %fdupes %{buildroot}%{$python_sitelib}
 
 %check
-%{python_expand PYTHONPATH=%{buildroot}%{$python_sitelib} \
-py.test-%{$python_bin_suffix} test.py}
+%python_expand PYTHONPATH=%{buildroot}%{$python_sitelib} py.test-%{$python_bin_suffix} -v test.py
 
 %files %{python_files}
 %doc README

test.py

@@ -1,136 +0,0 @@
# This file is part of Pyphen
#
# Copyright 2013 - Guillaume Ayoub <guillaume.ayoub@kozea.fr>
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyphen. If not, see <http://www.gnu.org/licenses/>.
"""
Pyphen Tests
============
Tests can be launched with:
- Pytest (``py.test test.py``).
- Nose (``nosetests``).
"""
from __future__ import unicode_literals
import pyphen
def test_inserted():
"""Test the ``inserted`` method."""
dic = pyphen.Pyphen(lang='nl_NL')
assert dic.inserted('lettergrepen') == 'let-ter-gre-pen'
def test_wrap():
"""Test the ``wrap`` method."""
dic = pyphen.Pyphen(lang='nl_NL')
assert dic.wrap('autobandventieldopje', 11) == (
'autoband-', 'ventieldopje')
def test_iterate():
"""Test the ``iterate`` method."""
dic = pyphen.Pyphen(lang='nl_NL')
assert tuple(dic.iterate('Amsterdam')) == (
('Amster', 'dam'), ('Am', 'sterdam'))
def test_fallback_dict():
"""Test the ``iterate`` method with a fallback dict."""
dic = pyphen.Pyphen(lang='nl_NL-variant')
assert tuple(dic.iterate('Amsterdam')) == (
('Amster', 'dam'), ('Am', 'sterdam'))
def test_missing_dict():
"""Test a missing dict."""
try:
pyphen.Pyphen(lang='mi_SS')
except KeyError:
pass
else: # pragma: no cover
raise Exception('Importing a missing dict must raise a KeyError')
def test_personal_dict():
"""Test a personal dict."""
dic = pyphen.Pyphen(lang='fr')
assert dic.inserted('autobandventieldopje') != 'au-to-band-ven-tiel-dop-je'
pyphen.LANGUAGES['fr'] = pyphen.LANGUAGES['nl_NL']
dic = pyphen.Pyphen(lang='fr')
assert dic.inserted('autobandventieldopje') == 'au-to-band-ven-tiel-dop-je'
def test_left_right():
"""Test the ``left`` and ``right`` parameters."""
dic = pyphen.Pyphen(lang='nl_NL')
assert dic.inserted('lettergrepen') == 'let-ter-gre-pen'
dic = pyphen.Pyphen(lang='nl_NL', left=4)
assert dic.inserted('lettergrepen') == 'letter-gre-pen'
dic = pyphen.Pyphen(lang='nl_NL', right=4)
assert dic.inserted('lettergrepen') == 'let-ter-grepen'
dic = pyphen.Pyphen(lang='nl_NL', left=4, right=4)
assert dic.inserted('lettergrepen') == 'letter-grepen'
def test_filename():
"""Test the ``filename`` parameter."""
dic = pyphen.Pyphen(filename=pyphen.LANGUAGES['nl_NL'])
assert dic.inserted('lettergrepen') == 'let-ter-gre-pen'
def test_alternative():
"""Test the alternative parser."""
dic = pyphen.Pyphen(lang='hu', left=1, right=1)
assert tuple(dic.iterate('kulissza')) == (
('kulisz', 'sza'), ('ku', 'lissza'))
assert dic.inserted('kulissza') == 'ku-lisz-sza'
def test_upper():
"""Test uppercase."""
dic = pyphen.Pyphen(lang='nl_NL')
assert dic.inserted('LETTERGREPEN') == 'LET-TER-GRE-PEN'
def test_upper_alternative():
"""Test uppercase with alternative parser."""
dic = pyphen.Pyphen(lang='hu', left=1, right=1)
assert tuple(dic.iterate('KULISSZA')) == (
('KULISZ', 'SZA'), ('KU', 'LISSZA'))
assert dic.inserted('KULISSZA') == 'KU-LISZ-SZA'
def test_all_dictionaries():
"""Test that all included dictionaries can be parsed."""
for lang in pyphen.LANGUAGES:
pyphen.Pyphen(lang=lang)
def test_fallback():
"""Test the language fallback algorithm."""
assert pyphen.language_fallback('en') == 'en'
assert pyphen.language_fallback('en_US') == 'en_US'
assert pyphen.language_fallback('en_FR') == 'en'
assert pyphen.language_fallback('en-Latn-US') == 'en_Latn_US'
assert pyphen.language_fallback('en-Cyrl-US') == 'en'
assert pyphen.language_fallback('fr-Latn-FR') == 'fr'
assert pyphen.language_fallback('en-US_variant1-x') == 'en_US'