Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added a skip_files_list_verify argument to archive.extracted state #55700

Merged
merged 3 commits into from Dec 23, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Expand Up @@ -31,6 +31,7 @@ Versions are `MAJOR.PATCH`.
- [#54917](https://github.com/saltstack/salt/pull/54917) - Added get_settings, put_settings and flush_synced methods for Elasticsearch module. - [@Oloremo](https://github.com/Oloremo)
- [#55418](https://github.com/saltstack/salt/pull/55418) - Added clean_parent argument for the archive state. - [@Oloremo](https://github.com/Oloremo)
- [#55593](https://github.com/saltstack/salt/issues/55593) - Added a support for a global proxy to pip module. - [@Oloremo](https://github.com/Oloremo)
- [#55443](https://github.com/saltstack/salt/issues/55443) - Added a skip_files_list_verify argument to archive.extracted state. - [@Oloremo](https://github.com/Oloremo)

---

Expand Down
4 changes: 2 additions & 2 deletions salt/modules/cp.py
Expand Up @@ -711,8 +711,8 @@ def list_minion(saltenv='base'):

def is_cached(path, saltenv='base'):
'''
Return a boolean if the given path on the master has been cached on the
minion
Returns the full path to a file if it is cached locally on the minion,
otherwise returns a blank string

CLI Example:

Expand Down
54 changes: 52 additions & 2 deletions salt/states/archive.py
Expand Up @@ -176,6 +176,7 @@ def extracted(name,
source_hash=None,
source_hash_name=None,
source_hash_update=False,
skip_files_list_verify=False,
skip_verify=False,
password=None,
options=None,
Expand Down Expand Up @@ -411,14 +412,33 @@ def extracted(name,

source_hash_update : False
Set this to ``True`` if archive should be extracted if source_hash has
changed. This would extract regardless of the ``if_missing`` parameter.
changed and there is a difference between the archive and the local files.
This would extract regardless of the ``if_missing`` parameter.

Note that this is only checked if the ``source`` value has not changed.
If it has (e.g. to increment a version number in the path) then the
archive will not be extracted even if the hash has changed.

.. note::
Setting this to ``True`` along with ``keep_source`` set to ``False``
will result in the ``source`` being re-downloaded in order to perform
the archive file list check. If this is not desirable, please consider
the ``skip_files_list_verify`` argument.

.. versionadded:: 2016.3.0

skip_files_list_verify : False
Set this to ``True`` if the archive should only be extracted when the
``source_hash`` has changed. In that case only the checksum of the
archive itself will be checked to determine whether extraction is
required; the list of extracted files will not be verified.

.. note::
The current limitation of this logic is that you have to set the
minion's ``hash_type`` config option to the same hash type that you
are going to pass via the ``source_hash`` argument.

.. versionadded:: Neon

skip_verify : False
If ``True``, hash verification of remote file sources (``http://``,
``https://``, ``ftp://``) will be skipped, and the ``source_hash``
Expand Down Expand Up @@ -533,7 +553,7 @@ def extracted(name,
then re-create that directory before extracting. Note that ``clean``
and ``clean_parent`` are mutually exclusive.

.. versionadded:: Sodium
.. versionadded:: Neon
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not related to this PR, just a mistake from my previous one. I hope it's ok to fix it here too


user
The user to own each extracted file. Not available on Windows.
Expand Down Expand Up @@ -678,6 +698,11 @@ def extracted(name,
# Remove pub kwargs as they're irrelevant here.
kwargs = salt.utils.args.clean_kwargs(**kwargs)

if skip_files_list_verify and skip_verify:
ret['comment'] = ('Only one of "skip_files_list_verify" and '
'"skip_verify" can be set to True')
return ret

if 'keep_source' in kwargs and 'keep' in kwargs:
ret.setdefault('warnings', []).append(
'Both \'keep_source\' and \'keep\' were used. Since these both '
Expand Down Expand Up @@ -947,6 +972,31 @@ def extracted(name,
else:
source_sum = {}

if skip_files_list_verify:
if source_is_local:
cached = source_match
else:
cached = __salt__['cp.is_cached'](source_match, saltenv=__env__)

if cached:
existing_cached_source_sum = _read_cached_checksum(cached)
log.debug('Existing source sum is: "%s". Expected source sum is "%s"',
existing_cached_source_sum, source_sum)
if source_sum and existing_cached_source_sum:
if existing_cached_source_sum['hsum'] == source_sum['hsum']:
ret['result'] = None if __opts__['test'] else True
ret['comment'] = (
'Archive {0} existing source sum is the same as the '
'expected one and skip_files_list_verify argument was set '
'to True. Extraction is not needed'.format(
salt.utils.url.redact_http_basic_auth(source_match)
)
)
return ret
else:
log.debug('There is no cached source %s available on minion',
source_match)

if source_is_local:
cached = source_match
else:
Expand Down
27 changes: 27 additions & 0 deletions tests/integration/states/test_archive.py
Expand Up @@ -28,6 +28,7 @@
ARCHIVE_NAME = 'custom.tar.gz'
ARCHIVE_TAR_SOURCE = 'http://localhost:{0}/{1}'.format(9999, ARCHIVE_NAME)
ARCHIVE_TAR_HASH = 'md5=7643861ac07c30fe7d2310e9f25ca514'
ARCHIVE_TAR_SHA_HASH = 'sha256=9591159d86f0a180e4e0645b2320d0235e23e66c66797df61508bf185e0ac1d2'
ARCHIVE_TAR_BAD_HASH = 'md5=d41d8cd98f00b204e9800998ecf8427e'
ARCHIVE_TAR_HASH_UPPER = 'md5=7643861AC07C30FE7D2310E9F25CA514'

Expand Down Expand Up @@ -256,3 +257,29 @@ def test_archive_extracted_with_non_base_saltenv(self):
saltenv='prod')
self.assertSaltTrueReturn(ret)
self._check_extracted(os.path.join(ARCHIVE_DIR, self.untar_file))

def test_local_archive_extracted_with_skip_files_list_verify(self):
    '''
    Test archive.extracted with a local archive file and
    skip_files_list_verify set to True.

    The first state run extracts the archive; the second run must be a
    no-op because the cached source checksum matches the expected
    ``source_hash``, which is what skip_files_list_verify short-circuits on.
    '''
    # Substring of the comment archive.extracted emits when it decides
    # extraction is unnecessary based on the archive checksum alone.
    expected_comment = ('existing source sum is the same as the expected one and '
                        'skip_files_list_verify argument was set to True. '
                        'Extraction is not needed')
    # First run: nothing cached yet, so the archive must be extracted.
    ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                         source=self.archive_local_tar_source, archive_format='tar',
                         skip_files_list_verify=True,
                         source_hash_update=True,
                         source_hash=ARCHIVE_TAR_SHA_HASH)

    self.assertSaltTrueReturn(ret)

    # Verify the archive contents actually landed on disk.
    self._check_extracted(self.untar_file)

    # Second run: checksums now match, so extraction must be skipped
    # and the state should report why in its comment.
    ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                         source=self.archive_local_tar_source, archive_format='tar',
                         skip_files_list_verify=True,
                         source_hash_update=True,
                         source_hash=ARCHIVE_TAR_SHA_HASH)

    self.assertSaltTrueReturn(ret)
    self.assertInSaltComment(expected_comment, ret)
94 changes: 94 additions & 0 deletions tests/unit/states/test_archive.py
Expand Up @@ -266,3 +266,97 @@ def test_clean_parent_conflict(self):
self.assertEqual(ret['result'], False)
self.assertEqual(ret['changes'], {})
self.assertEqual(ret['comment'], ret_comment)

def test_skip_files_list_verify_conflict(self):
    '''
    Tests the call of extraction with both skip_files_list_verify and
    skip_verify set to True.

    The two options are mutually exclusive, so archive.extracted must
    fail early with an explanatory comment and no changes.
    '''
    # Pretend GNU tar is installed so option parsing takes the GNU path.
    gnutar = MagicMock(return_value='tar (GNU tar)')
    source = '/tmp/foo.tar.gz'
    # Exact comment expected from the mutual-exclusion guard.
    ret_comment = 'Only one of "skip_files_list_verify" and "skip_verify" can be set to True'
    mock_false = MagicMock(return_value=False)
    mock_true = MagicMock(return_value=True)
    state_single_mock = MagicMock(return_value={'local': {'result': True}})
    run_all = MagicMock(return_value={'retcode': 0, 'stdout': 'stdout', 'stderr': 'stderr'})
    # file.source_list returns a (source, source_hash) tuple.
    mock_source_list = MagicMock(return_value=(source, None))
    # Minimal archive.list result so the state would be able to proceed
    # past content inspection if the guard were (incorrectly) skipped.
    list_mock = MagicMock(return_value={
        'dirs': [],
        'files': ['stdout'],
        'links': [],
        'top_level_dirs': [],
        'top_level_files': ['stdout'],
        'top_level_links': [],
    })
    isfile_mock = MagicMock(side_effect=_isfile_side_effect)

    with patch.dict(archive.__salt__, {'cmd.run': gnutar,
                                       'file.directory_exists': mock_false,
                                       'file.file_exists': mock_false,
                                       'state.single': state_single_mock,
                                       'file.makedirs': mock_true,
                                       'cmd.run_all': run_all,
                                       'archive.list': list_mock,
                                       'file.source_list': mock_source_list}),\
            patch.dict(archive.__states__, {'file.directory': mock_true}),\
            patch.object(os.path, 'isfile', isfile_mock),\
            patch('salt.utils.path.which', MagicMock(return_value=True)):
        ret = archive.extracted(os.path.join(os.sep + 'tmp', 'out'),
                                source,
                                options='xvzf',
                                enforce_toplevel=False,
                                clean=True,
                                skip_files_list_verify=True,
                                skip_verify=True,
                                keep=True)
        # The guard must fail the state before any changes are made.
        self.assertEqual(ret['result'], False)
        self.assertEqual(ret['changes'], {})
        self.assertEqual(ret['comment'], ret_comment)

def test_skip_files_list_verify_success(self):
    '''
    Test that if the local and expected source hash are the same we won't
    do anything.

    With skip_files_list_verify=True and a cached copy whose checksum
    equals the expected source_sum, archive.extracted must return True
    with no changes and a comment explaining that extraction was skipped.
    '''

    # Platform-specific paths: Windows uses drive letters, macOS resolves
    # /tmp to /private/tmp, everything else uses plain /tmp.
    if salt.utils.platform.is_windows():
        source = 'c:\\tmp\\foo.tar.gz'
        tmp_dir = 'c:\\tmp\\test_extracted_tar'
    elif salt.utils.platform.is_darwin():
        source = '/private/tmp/foo.tar.gz'
        tmp_dir = '/private/tmp/test_extracted_tar'
    else:
        source = '/tmp/foo.tar.gz'
        tmp_dir = '/tmp/test_extracted_tar'

    expected_comment = ('Archive {} existing source sum is the same as the '
                        'expected one and skip_files_list_verify argument '
                        'was set to True. Extraction is not needed'.format(source))
    expected_ret = {'name': tmp_dir,
                    'result': True,
                    'changes': {},
                    'comment': expected_comment
                    }
    mock_true = MagicMock(return_value=True)
    mock_false = MagicMock(return_value=False)
    # cp.is_cached reports a cached copy of the archive on the minion.
    mock_cached = MagicMock(return_value='{}/{}'.format(tmp_dir, source))
    # Same checksum is returned for both the cached file and the expected
    # source hash, so the state must decide extraction is unnecessary.
    source_sum = {'hsum': 'testhash', 'hash_type': 'sha256'}
    mock_hash = MagicMock(return_value=source_sum)
    mock_source_list = MagicMock(return_value=(source, None))
    isfile_mock = MagicMock(side_effect=_isfile_side_effect)

    with patch('salt.states.archive._read_cached_checksum', mock_hash):
        with patch.dict(archive.__opts__, {'test': False,
                                           'cachedir': tmp_dir,
                                           'hash_type': 'sha256'}),\
                patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
                                              'file.get_source_sum': mock_hash,
                                              'file.check_hash': mock_true,
                                              'cp.is_cached': mock_cached,
                                              'file.source_list': mock_source_list}),\
                patch.object(os.path, 'isfile', isfile_mock):

            ret = archive.extracted(tmp_dir,
                                    source,
                                    source_hash='testhash',
                                    skip_files_list_verify=True,
                                    enforce_toplevel=False)
            # The full return dict must match: success, no changes,
            # and the "extraction is not needed" comment.
            self.assertDictEqual(ret, expected_ret)