Accepting request 1046535 from home:mcalabkova:branches:devel:languages:python
- Add patch pygments214.patch to fix tests with new Pygments OBS-URL: https://build.opensuse.org/request/show/1046535 OBS-URL: https://build.opensuse.org/package/show/devel:languages:python:jupyter/python-ipython?expand=0&rev=101
This commit is contained in:
parent
b3a3f28d93
commit
31b4e3d45f
171
pygments214.patch
Normal file
171
pygments214.patch
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
From ed7f35f8b721d4b4dcafea173ce724bee25704c7 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Matthias Bussonnier <bussonniermatthias@gmail.com>
|
||||||
|
Date: Tue, 3 Jan 2023 11:57:18 +0100
|
||||||
|
Subject: [PATCH] Fix tests for pygments > 2.14
|
||||||
|
|
||||||
|
Pygments 2.14+ have the bash lexer return some tokens as
|
||||||
|
Text.Whitespace instead of Text, this update the test to support this.
|
||||||
|
---
|
||||||
|
IPython/lib/tests/test_lexers.py | 52 ++++++++++++++++++--------------
|
||||||
|
1 file changed, 30 insertions(+), 22 deletions(-)
|
||||||
|
|
||||||
|
diff --git a/IPython/lib/tests/test_lexers.py b/IPython/lib/tests/test_lexers.py
|
||||||
|
index efa00d601ea..000b8fe6fd9 100644
|
||||||
|
--- a/IPython/lib/tests/test_lexers.py
|
||||||
|
+++ b/IPython/lib/tests/test_lexers.py
|
||||||
|
@@ -4,11 +4,14 @@
|
||||||
|
# Distributed under the terms of the Modified BSD License.
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
+from pygments import __version__ as pygments_version
|
||||||
|
from pygments.token import Token
|
||||||
|
from pygments.lexers import BashLexer
|
||||||
|
|
||||||
|
from .. import lexers
|
||||||
|
|
||||||
|
+pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
|
||||||
|
+
|
||||||
|
|
||||||
|
class TestLexers(TestCase):
|
||||||
|
"""Collection of lexers tests"""
|
||||||
|
@@ -18,25 +21,26 @@ def setUp(self):
|
||||||
|
|
||||||
|
def testIPythonLexer(self):
|
||||||
|
fragment = '!echo $HOME\n'
|
||||||
|
- tokens = [
|
||||||
|
+ bash_tokens = [
|
||||||
|
(Token.Operator, '!'),
|
||||||
|
]
|
||||||
|
- tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
|
||||||
|
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
|
||||||
|
+ bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
|
||||||
|
+ ipylex_token = list(self.lexer.get_tokens(fragment))
|
||||||
|
+ assert bash_tokens[:-1] == ipylex_token[:-1]
|
||||||
|
|
||||||
|
- fragment_2 = '!' + fragment
|
||||||
|
+ fragment_2 = "!" + fragment
|
||||||
|
tokens_2 = [
|
||||||
|
(Token.Operator, '!!'),
|
||||||
|
- ] + tokens[1:]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ ] + bash_tokens[1:]
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = '\t %%!\n' + fragment[1:]
|
||||||
|
tokens_2 = [
|
||||||
|
(Token.Text, '\t '),
|
||||||
|
(Token.Operator, '%%!'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
- ] + tokens[1:]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ ] + bash_tokens[1:]
|
||||||
|
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
|
||||||
|
|
||||||
|
fragment_2 = 'x = ' + fragment
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -44,8 +48,8 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Text, ' '),
|
||||||
|
(Token.Operator, '='),
|
||||||
|
(Token.Text, ' '),
|
||||||
|
- ] + tokens
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ ] + bash_tokens
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = 'x, = ' + fragment
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -54,8 +58,8 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Text, ' '),
|
||||||
|
(Token.Operator, '='),
|
||||||
|
(Token.Text, ' '),
|
||||||
|
- ] + tokens
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ ] + bash_tokens
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = 'x, = %sx ' + fragment[1:]
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -67,8 +71,10 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Operator, '%'),
|
||||||
|
(Token.Keyword, 'sx'),
|
||||||
|
(Token.Text, ' '),
|
||||||
|
- ] + tokens[1:]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ ] + bash_tokens[1:]
|
||||||
|
+ if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
|
||||||
|
+ tokens_2[7] = (Token.Text.Whitespace, " ")
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = 'f = %R function () {}\n'
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -80,7 +86,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Keyword, 'R'),
|
||||||
|
(Token.Text, ' function () {}\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
|
||||||
|
|
||||||
|
fragment_2 = '\t%%xyz\n$foo\n'
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -89,7 +95,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Keyword, 'xyz'),
|
||||||
|
(Token.Text, '\n$foo\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
|
||||||
|
|
||||||
|
fragment_2 = '%system?\n'
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -98,7 +104,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Operator, '?'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = 'x != y\n'
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -109,7 +115,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Name, 'y'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment_2 = ' ?math.sin\n'
|
||||||
|
tokens_2 = [
|
||||||
|
@@ -118,7 +124,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Text, 'math.sin'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
|
||||||
|
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
|
||||||
|
|
||||||
|
fragment = ' *int*?\n'
|
||||||
|
tokens = [
|
||||||
|
@@ -126,7 +132,7 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Operator, '?'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
|
||||||
|
+ assert tokens == list(self.lexer.get_tokens(fragment))
|
||||||
|
|
||||||
|
fragment = '%%writefile -a foo.py\nif a == b:\n pass'
|
||||||
|
tokens = [
|
||||||
|
@@ -145,7 +151,9 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Keyword, 'pass'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
|
||||||
|
+ if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
|
||||||
|
+ tokens[10] = (Token.Text.Whitespace, "\n")
|
||||||
|
+ assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
|
||||||
|
|
||||||
|
fragment = '%%timeit\nmath.sin(0)'
|
||||||
|
tokens = [
|
||||||
|
@@ -173,4 +181,4 @@ def testIPythonLexer(self):
|
||||||
|
(Token.Punctuation, '>'),
|
||||||
|
(Token.Text, '\n'),
|
||||||
|
]
|
||||||
|
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
|
||||||
|
+ assert tokens == list(self.lexer.get_tokens(fragment))
|
@@ -1,3 +1,8 @@
|
|||||||
|
-------------------------------------------------------------------
|
||||||
|
Tue Jan 3 14:03:46 UTC 2023 - Markéta Machová <mmachova@suse.com>
|
||||||
|
|
||||||
|
- Add patch pygments214.patch to fix tests with new Pygments
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
-------------------------------------------------------------------
|
||||||
Sun Dec 25 19:08:47 UTC 2022 - Ben Greiner <code@bnavigator.de>
|
Sun Dec 25 19:08:47 UTC 2022 - Ben Greiner <code@bnavigator.de>
|
||||||
|
|
||||||
|
@@ -1,7 +1,7 @@
|
|||||||
#
|
#
|
||||||
# spec file
|
# spec file
|
||||||
#
|
#
|
||||||
# Copyright (c) 2022 SUSE LLC
|
# Copyright (c) 2023 SUSE LLC
|
||||||
#
|
#
|
||||||
# All modifications and additions to the file contributed by third parties
|
# All modifications and additions to the file contributed by third parties
|
||||||
# remain the property of their copyright owners, unless otherwise agreed
|
# remain the property of their copyright owners, unless otherwise agreed
|
||||||
@@ -40,6 +40,8 @@ Group: Development/Languages/Python
|
|||||||
URL: https://github.com/ipython/ipython
|
URL: https://github.com/ipython/ipython
|
||||||
Source: https://files.pythonhosted.org/packages/source/i/ipython/ipython-%{version}.tar.gz
|
Source: https://files.pythonhosted.org/packages/source/i/ipython/ipython-%{version}.tar.gz
|
||||||
Source1: https://raw.githubusercontent.com/jupyter/qtconsole/4.0.0/qtconsole/resources/icon/JupyterConsole.svg
|
Source1: https://raw.githubusercontent.com/jupyter/qtconsole/4.0.0/qtconsole/resources/icon/JupyterConsole.svg
|
||||||
|
# PATCH-FIX-UPSTREAM https://github.com/ipython/ipython/pull/13882 Fix tests for pygments >= 2.14
|
||||||
|
Patch: pygments214.patch
|
||||||
BuildRequires: %{python_module base >= 3.8}
|
BuildRequires: %{python_module base >= 3.8}
|
||||||
BuildRequires: %{python_module pip}
|
BuildRequires: %{python_module pip}
|
||||||
BuildRequires: %{python_module setuptools >= 51.0.0}
|
BuildRequires: %{python_module setuptools >= 51.0.0}
|
||||||
|
Loading…
Reference in New Issue
Block a user