salt/accounting-for-when-files-in-an-archive-contain-non-.patch


Accepting request 626472 from home:mdinca:branches:systemsmanagement:saltstack

- Update to 2018.3.2
  See https://docs.saltstack.com/en/latest/topics/releases/2018.3.2.html
  for full changelog
- Added:
  * accounting-for-when-files-in-an-archive-contain-non-.patch
  * add-all_versions-parameter-to-include-all-installed-.patch
  * add-custom-suse-capabilities-as-grains.patch
  * add-engine-relaying-libvirt-events.patch
  * add-environment-variable-to-know-if-yum-is-invoked-f.patch
  * add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
  * align-suse-salt-master.service-limitnofiles-limit-wi.patch
  * avoid-incomprehensive-message-if-crashes.patch
  * fix-deprecation-warning-bsc-1095507.patch
  * fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
  * fix-unboundlocalerror-in-file.get_diff.patch
  * fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
  * prevent-zypper-from-parsing-repo-configuration-from-.patch
  * remove-old-hack-when-reporting-multiversion-packages.patch
  * show-recommendations-for-salt-ssh-cross-version-pyth.patch
- Modified:
  * activate-all-beacons-sources-config-pillar-grains.patch
  * add-saltssh-multi-version-support-across-python-inte.patch
  * avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
  * do-not-override-jid-on-returners-only-sending-back-t.patch
  * enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
  * fall-back-to-pymysql.patch
  * feat-add-grain-for-all-fqdns.patch
  * fix-bsc-1065792.patch
  * fix-decrease-loglevel-when-unable-to-resolve-addr.patch
  * fix-for-ec2-rate-limit-failures.patch

OBS-URL: https://build.opensuse.org/request/show/626472
OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=127
2018-07-30 13:52:13 +02:00
From 5305ee8bf07e40dc54aefcbb92016ff868135749 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway" <gareth@wiked.org>
Date: Wed, 9 May 2018 09:33:58 -0700
Subject: [PATCH] Accounting for when files in an archive contain
non-ascii characters
Updating integration/modules/test_archive to include filenames with unicode characters.
Only convert to bytes when using Python 2.
Updating with requested changes.
Ensure member names are decoded before adding to various lists.
Adding a test to ensure archive.list returns the right results when a tar file contains a file with unicode in its name.
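
A standalone sketch of the idea behind the change, using only the standard
library rather than Salt's salt.utils.data helpers (illustrative, not the
module's actual code): collect tar member names as text so names containing
non-ASCII characters survive listing on both Python 2 and 3.

    # -*- coding: utf-8 -*-
    import io
    import tarfile

    def list_members(fileobj):
        '''Return (dirs, files, links) with every member name as text.'''
        dirs, files, links = [], [], []
        with tarfile.open(fileobj=fileobj, mode='r') as tar:
            for member in tar.getmembers():
                name = member.name
                if isinstance(name, bytes):  # Python 2 can return byte strings
                    name = name.decode('utf-8')
                if member.issym():
                    links.append(name)
                elif member.isdir():
                    dirs.append(name + '/')
                else:
                    files.append(name)
        return dirs, files, links

    # Build an in-memory archive containing a file whose name includes '®'.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w') as tar:
        data = b'contents'
        info = tarfile.TarInfo(name=u'src/file\u00ae')
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))
    buf.seek(0)
    print(list_members(buf))  # e.g. ([], ['src/file®'], [])
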
---
salt/modules/archive.py | 13 +++---
salt/states/archive.py | 4 +-
tests/integration/modules/test_archive.py | 52 ++++++++++++++++++++++-
3 files changed, 59 insertions(+), 10 deletions(-)
diff --git a/salt/modules/archive.py b/salt/modules/archive.py
index 48f0efa18e..76cd3eeb97 100644
--- a/salt/modules/archive.py
+++ b/salt/modules/archive.py
@@ -186,12 +186,13 @@ def list_(name,
else {'fileobj': cached.stdout, 'mode': 'r|'}
with contextlib.closing(tarfile.open(**open_kwargs)) as tar_archive:
for member in tar_archive.getmembers():
+ _member = salt.utils.data.decode(member.name)
if member.issym():
- links.append(member.name)
+ links.append(_member)
elif member.isdir():
- dirs.append(member.name + '/')
+ dirs.append(_member + '/')
else:
- files.append(member.name)
+ files.append(_member)
return dirs, files, links
except tarfile.ReadError:
@@ -410,9 +411,9 @@ def list_(name,
item.sort()
if verbose:
- ret = {'dirs': sorted(dirs),
- 'files': sorted(files),
- 'links': sorted(links)}
+ ret = {'dirs': sorted(salt.utils.data.decode_list(dirs)),
+ 'files': sorted(salt.utils.data.decode_list(files)),
+ 'links': sorted(salt.utils.data.decode_list(links))}
ret['top_level_dirs'] = [x for x in ret['dirs']
if x.count('/') == 1]
ret['top_level_files'] = [x for x in ret['files']
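
In the verbose branch above, the collected names are routed through
salt.utils.data.decode_list before sorting, so the returned dict holds text
rather than byte strings. A rough stand-in for what such a helper does,
assumed from the call sites here rather than taken from Salt's actual
implementation:

    def decode_list(items, encoding='utf-8'):
        # Assumption: decode every bytes entry to text and pass everything
        # else through unchanged, so sorted() never compares bytes with str.
        return [item.decode(encoding) if isinstance(item, bytes) else item
                for item in items]

    dirs = [b'dir\xc2\xae/', u'plain/']
    print(sorted(decode_list(dirs)))  # ['dir®/', 'plain/']
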
diff --git a/salt/states/archive.py b/salt/states/archive.py
index 847c5e9914..6838b2202d 100644
--- a/salt/states/archive.py
+++ b/salt/states/archive.py
@@ -1090,7 +1090,7 @@ def extracted(name,
and not stat.S_ISDIR(x)),
(contents['links'], stat.S_ISLNK)):
for path in path_list:
- full_path = os.path.join(name, path)
+ full_path = salt.utils.path.join(name, path)
try:
path_mode = os.lstat(full_path.rstrip(os.sep)).st_mode
if not func(path_mode):
@@ -1259,7 +1259,7 @@ def extracted(name,
if options is None:
try:
with closing(tarfile.open(cached, 'r')) as tar:
- tar.extractall(name)
+ tar.extractall(salt.utils.stringutils.to_str(name))
files = tar.getnames()
if trim_output:
files = files[:trim_output]
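
The extraction path now converts the destination to a native str before
calling tar.extractall(), which on Python 2 helps avoid mixing a unicode
destination path with byte member names when tarfile joins them. An
illustrative to_str-style helper under that assumption (not the actual
salt.utils.stringutils implementation):

    import sys

    def to_str(value, encoding='utf-8'):
        # Illustrative only: return the platform's native 'str' type so
        # later path joins never mix bytes and unicode.
        if isinstance(value, str):
            return value
        if sys.version_info[0] >= 3:
            return value.decode(encoding) if isinstance(value, bytes) else str(value)
        return value.encode(encoding)  # Python 2: unicode -> byte str

    print(to_str(u'/tmp/dst\u00ae'))
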
diff --git a/tests/integration/modules/test_archive.py b/tests/integration/modules/test_archive.py
index 59fe2f5f61..4301b9e3b0 100644
--- a/tests/integration/modules/test_archive.py
+++ b/tests/integration/modules/test_archive.py
@@ -47,7 +47,7 @@ class ArchiveTest(ModuleCase):
self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt))
self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt))
- def _set_up(self, arch_fmt):
+ def _set_up(self, arch_fmt, unicode_filename=False):
'''
Create source file tree and destination directory
@@ -62,7 +62,11 @@ class ArchiveTest(ModuleCase):
# Create source
os.makedirs(self.src)
- with salt.utils.files.fopen(os.path.join(self.src, 'file'), 'w') as theorem:
+ if unicode_filename:
+ filename = 'file®'
+ else:
+ filename = 'file'
+ with salt.utils.files.fopen(os.path.join(self.src, filename), 'w') as theorem:
theorem.write(textwrap.dedent(salt.utils.stringutils.to_str(r'''\
Compression theorem of computational complexity theory:
@@ -150,6 +154,50 @@ class ArchiveTest(ModuleCase):
self._tear_down()
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_pack_unicode(self):
+ '''
+ Validate using the tar function to create archives
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+
+ # Test create archive
+ ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_unpack_unicode(self):
+ '''
+ Validate using the tar function to extract archives
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+
+ # Test extract archive
+ ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_list_unicode(self):
+ '''
+ Validate using archive.list to list archive contents
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+
+ # Test list archive
+ ret = self.run_function('archive.list', name=self.arch)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
@skipIf(not salt.utils.path.which('gzip'), 'Cannot find gzip executable')
def test_gzip(self):
'''
--
2.17.1