Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docker/base/Dockerfile.j2
Original file line number Diff line number Diff line change
Expand Up @@ -148,9 +148,9 @@ RUN rm -f /etc/rpm/macros.image-language-conf \
{%- if not loop.last %} \{% endif %}
{% endfor -%}

{% for cmd in rpm_setup %}
{% for cmd in rpm_setup %}
{{ cmd }}
{% endfor %}
{% endfor %}

{% block base_centos_repo_overrides_post_rpm %}{% endblock %}

Expand Down
1 change: 1 addition & 0 deletions docker/base/curlrc
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
--fail
--location
--retry 5
--retry-all-errors
--silent
--show-error
--write-out "curl (%{url_effective}): response: %{http_code}, time: %{time_total}, size: %{size_download}\n"
4 changes: 4 additions & 0 deletions docker/cinder/cinder-backup/Dockerfile.j2
Original file line number Diff line number Diff line change
Expand Up @@ -10,22 +10,26 @@ LABEL maintainer="{{ maintainer }}" name="{{ image_name }}" build-date="{{ build
{% if install_type == 'binary' %}
{% if base_package_type == 'rpm' %}
{% set cinder_backup_packages = [
'device-mapper-multipath',
'nfs-utils'
] %}
{% elif base_package_type == 'deb' %}
{% set cinder_backup_packages = [
'cinder-backup',
'multipath-tools',
'nfs-common'
] %}
{% endif %}

{% elif install_type == 'source' %}
{% if base_package_type == 'rpm' %}
{% set cinder_backup_packages = [
'device-mapper-multipath',
'nfs-utils'
] %}
{% elif base_package_type == 'deb' %}
{% set cinder_backup_packages = [
'multipath-tools',
'nfs-common'
] %}
{% endif %}
Expand Down
2 changes: 2 additions & 0 deletions docker/cinder/cinder-volume/Dockerfile.j2
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ LABEL maintainer="{{ maintainer }}" name="{{ image_name }}" build-date="{{ build

{% if base_package_type == 'rpm' %}
{% set cinder_volume_packages = [
'device-mapper-multipath',
'nfs-utils',
'nvmetcli',
'python3-rtslib',
Expand All @@ -24,6 +25,7 @@ LABEL maintainer="{{ maintainer }}" name="{{ image_name }}" build-date="{{ build
{% elif base_package_type == 'deb' %}
{% set cinder_volume_packages = [
'lsscsi',
'multipath-tools',
'nfs-common',
'nvme-cli',
'sysfsutils',
Expand Down
2 changes: 2 additions & 0 deletions docker/nova/nova-compute/Dockerfile.j2
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ LABEL maintainer="{{ maintainer }}" name="{{ image_name }}" build-date="{{ build
'python3-libguestfs',
'python3-oslo-vmware',
'python3-rtslib',
'qemu-kvm-block-rbd',
'sysfsutils',
'targetcli',
'xfsprogs'
Expand Down Expand Up @@ -68,6 +69,7 @@ LABEL maintainer="{{ maintainer }}" name="{{ image_name }}" build-date="{{ build
'python3-rados',
'python3-rbd',
'python3-rtslib-fb',
'qemu-block-extra',
'sasl2-bin',
'sysfsutils',
'targetcli-fb',
Expand Down
2 changes: 1 addition & 1 deletion kolla/image/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -469,7 +469,7 @@ def update_buildargs(self):
def builder(self, image):

def _test_malicious_tarball(archive, path):
tar_file = tarfile.open(archive, 'r|gz')
tar_file = tarfile.open(archive, 'r|*')
for n in tar_file.getnames():
if not os.path.abspath(os.path.join(path, n)).startswith(path):
tar_file.close()
Expand Down
72 changes: 72 additions & 0 deletions kolla/tests/test_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,9 +300,81 @@ def test_process_source(self, mock_get, mock_client,
else:
self.assertIsNotNone(get_result)

@mock.patch.dict(os.environ, clear=True)
@mock.patch('docker.APIClient')
def test_local_directory(self, mock_client):
    """Build succeeds when a plugin is sourced from a local directory.

    Creates a throwaway directory containing one file, points a
    'local'-type plugin at it, runs BuildTask, and asserts the build
    reports success.
    """
    tmpdir = tempfile.mkdtemp()
    file_path = os.path.join(tmpdir, 'test.txt')
    # Ensure the files we create are read/write by the creator only.
    saved_umask = os.umask(0o077)

    try:
        with open(file_path, 'w') as f:
            f.write('Hello')

        self.dc = mock_client
        self.image.plugins = [{
            'name': 'fake-image-base-plugin-test',
            'type': 'local',
            'enabled': True,
            'source': tmpdir}
        ]
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()
        self.assertTrue(builder.success)
    finally:
        # Clean up unconditionally. The previous version removed the file
        # only in an `else` branch and swallowed IOError with a print(),
        # which (a) let the test pass vacuously when setup failed and
        # (b) made the final os.rmdir fail with ENOTEMPTY on any error,
        # leaking the temp directory.
        os.umask(saved_umask)
        if os.path.exists(file_path):
            os.remove(file_path)
        os.rmdir(tmpdir)

@mock.patch.dict(os.environ, clear=True)
@mock.patch('docker.APIClient')
def test_malicious_tar(self, mock_client):
    """Build fails for a plugin tarball with a path-traversal member.

    Builds an uncompressed tar whose member name ('../test.txt')
    would extract outside the destination directory, feeds it to
    BuildTask as a 'local'-type plugin source, and asserts the build
    is rejected (builder.success is False).
    """
    tmpdir = tempfile.mkdtemp()
    file_path = os.path.join(tmpdir, 'test.txt')
    archive_path = os.path.join(tmpdir, 'my_archive.tar')
    # Ensure the files we create are read/write by the creator only.
    saved_umask = os.umask(0o077)

    try:
        with open(file_path, 'w') as f:
            f.write('Hello')

        # arcname with '../' makes the member escape the extraction root,
        # which the malicious-tarball check in the builder must detect.
        with tarfile.open(archive_path, 'w') as tar:
            tar.add(file_path, arcname='../test.txt')

        self.dc = mock_client
        self.image.plugins = [{
            'name': 'fake-image-base-plugin-test',
            'type': 'local',
            'enabled': True,
            'source': archive_path}
        ]

        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()
        self.assertFalse(builder.success)
    finally:
        # Clean up unconditionally. The previous version removed the files
        # only in an `else` branch and swallowed IOError with a print(),
        # which (a) let the test pass vacuously when setup failed and
        # (b) made the final os.rmdir fail with ENOTEMPTY on any error,
        # leaking the temp directory.
        os.umask(saved_umask)
        for path in (file_path, archive_path):
            if os.path.exists(path):
                os.remove(path)
        os.rmdir(tmpdir)

@mock.patch.dict(os.environ, clear=True)
@mock.patch('docker.APIClient')
def test_malicious_tar_gz(self, mock_client):
tmpdir = tempfile.mkdtemp()
file_name = 'test.txt'
archive_name = 'my_archive.tar.gz'
Expand Down
2 changes: 1 addition & 1 deletion specs/logging-with-heka.rst
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ An alternative to this proposal involves using Logstash in a centralized
way as done in [1].

Another alternative would be to execute Logstash on each cluster node, as this
spec proposes with Heka. But this would mean running a JVM on each cluster
spec proposes with Heka. But this would mean running a JVM on each cluster
node, and using Redis as a centralized queue.

Also, as described above, we initially considered relying on services writing
Expand Down