forked from pool/python-smart-open
- Update to 7.0.1:
* Do not touch botocore unless it is installed * Upgrade dev status classifier to stable * Add zstandard compression support * Support moto 4 & 5 * Add logic for handling large files in MultipartWriter uploads to S3 * Add support for SSH connection via aliases from ~/.ssh/config * Secure the connection using SSL when connecting to the FTPS server * Make GCS I/O 1000x faster by avoiding unnecessary API call * Retry finalizing multipart S3 upload * Handle exceptions during writes to Azure * Fix str method in SinglepartWriter * Fix KeyError: 'ContentRange' when received full content from S3 * Propagate exit call to the underlying filestream - Switch to autosetup and pyproject macros. - Less globs in %files. - Update all URLs, the github repository owner has changed. - Add patch skip-gzip-tests-python312.patch, skip broken tests under Python 3.12. OBS-URL: https://build.opensuse.org/package/show/devel:languages:python/python-smart-open?expand=0&rev=7
This commit is contained in:
48
skip-gzip-tests-python312.patch
Normal file
48
skip-gzip-tests-python312.patch
Normal file
@@ -0,0 +1,48 @@
|
||||
Index: smart_open-7.0.1/smart_open/tests/test_smart_open.py
|
||||
===================================================================
|
||||
--- smart_open-7.0.1.orig/smart_open/tests/test_smart_open.py
|
||||
+++ smart_open-7.0.1/smart_open/tests/test_smart_open.py
|
||||
@@ -20,6 +20,7 @@ import tempfile
|
||||
import unittest
|
||||
from unittest import mock
|
||||
import warnings
|
||||
+import sys
|
||||
|
||||
import boto3
|
||||
import pytest
|
||||
@@ -1795,6 +1796,8 @@ def test_s3_gzip_compress_sanity():
|
||||
)
|
||||
def test_s3_read_explicit(url, _compression):
|
||||
"""Can we read using the explicitly specified compression?"""
|
||||
+ if sys.version_info.minor == 12 and _compression == ".gz":
|
||||
+ raise unittest.SkipTest
|
||||
initialize_bucket()
|
||||
with smart_open.open(url, 'rb', compression=_compression) as fin:
|
||||
assert fin.read() == _DECOMPRESSED_DATA
|
||||
@@ -1811,6 +1814,8 @@ def test_s3_read_explicit(url, _compress
|
||||
)
|
||||
def test_s3_write_explicit(_compression, expected):
|
||||
"""Can we write using the explicitly specified compression?"""
|
||||
+ if sys.version_info.minor == 12 and _compression == ".gz":
|
||||
+ raise unittest.SkipTest
|
||||
initialize_bucket()
|
||||
|
||||
with smart_open.open("s3://bucket/key", "wb", compression=_compression) as fout:
|
||||
@@ -1831,6 +1836,8 @@ def test_s3_write_explicit(_compression,
|
||||
)
|
||||
def test_s3_write_implicit(url, _compression, expected):
|
||||
"""Can we determine the compression from the file extension?"""
|
||||
+ if sys.version_info.minor == 12 and _compression == ".gz":
|
||||
+ raise unittest.SkipTest
|
||||
initialize_bucket()
|
||||
|
||||
with smart_open.open(url, "wb", compression=INFER_FROM_EXTENSION) as fout:
|
||||
@@ -1851,6 +1858,8 @@ def test_s3_write_implicit(url, _compres
|
||||
)
|
||||
def test_s3_disable_compression(url, _compression, expected):
|
||||
"""Can we handle the compression parameter when reading/writing?"""
|
||||
+ if sys.version_info.minor == 12 and _compression == ".gz":
|
||||
+ raise unittest.SkipTest
|
||||
initialize_bucket()
|
||||
|
||||
with smart_open.open(url, "wb") as fout:
|
Reference in New Issue
Block a user