From e12a4265dfd14d9e35595ba603102c9886f3c7a1 Mon Sep 17 00:00:00 2001 From: Gustavo Serra Scalet Date: Thu, 7 Jun 2018 21:47:20 -0300 Subject: [PATCH] Refactor python code: commonlize scripts utils and bootstrap And in order to do that, I intended to use the most up-to-date / robust flavor of a functionality found. Took this chance also to solve some flake8 warnings that I could. --- daisy_workflows/image_build/debian/build.py | 40 ++-- .../image_build/debian/debian.wf.json | 9 +- .../enterprise_linux/build_installer.py | 14 +- .../enterprise_linux/enterprise_linux.wf.json | 13 +- .../enterprise_linux/ks_helpers.py | 20 +- .../image_build/enterprise_linux/save_logs.py | 8 +- .../image_build/linux_common/bootstrap.py | 78 ------- .../image_build/linux_common/utils.py | 144 ------------- .../debian/translate_debian.wf.json | 9 +- .../enterprise_linux/translate_el.wf.json | 9 +- .../image_import/linux_common/bootstrap.py | 57 ------ .../image_import/linux_common/utils.py | 163 --------------- .../ubuntu/translate_ubuntu.wf.json | 9 +- .../configuration/configuration.wf.json | 18 +- daisy_workflows/image_test/disk/disk.wf.json | 9 +- .../metadata-ssh/metadata-ssh.wf.json | 9 +- .../oslogin-ssh/oslogin-ssh.wf.json | 9 +- .../linux_common/bootstrap.py | 47 +++-- .../{image_test => }/linux_common/utils.py | 191 ++++++++++++++++-- 19 files changed, 294 insertions(+), 562 deletions(-) delete mode 100644 daisy_workflows/image_build/linux_common/bootstrap.py delete mode 100644 daisy_workflows/image_build/linux_common/utils.py delete mode 100644 daisy_workflows/image_import/linux_common/bootstrap.py delete mode 100644 daisy_workflows/image_import/linux_common/utils.py rename daisy_workflows/{image_test => }/linux_common/bootstrap.py (55%) rename daisy_workflows/{image_test => }/linux_common/utils.py (74%) diff --git a/daisy_workflows/image_build/debian/build.py b/daisy_workflows/image_build/debian/build.py index 387a059e6b..fbfc887d32 100644 --- 
a/daisy_workflows/image_build/debian/build.py +++ b/daisy_workflows/image_build/debian/build.py @@ -50,39 +50,42 @@ def main(): 'bootstrap_vz_manifest', raise_on_not_found=True) bvz_version = utils.GetMetadataParam( 'bootstrap_vz_version', raise_on_not_found=True) - repo = utils.GetMetadataParam('google_cloud_repo', raise_on_not_found=True).strip() - image_dest = utils.GetMetadataParam('image_dest', raise_on_not_found=True) - outs_path = utils.GetMetadataParam('daisy-outs-path', raise_on_not_found=True) + repo = utils.GetMetadataParam('google_cloud_repo', + raise_on_not_found=True).strip() + image_dest = utils.GetMetadataParam('image_dest', + raise_on_not_found=True) + outs_path = utils.GetMetadataParam('daisy-outs-path', + raise_on_not_found=True) if repo not in REPOS: raise ValueError( 'Metadata "google_cloud_repo" must be one of %s.' % REPOS) - utils.Status('Bootstrap_vz manifest: %s' % bvz_manifest) - utils.Status('Bootstrap_vz version: %s' % bvz_version) - utils.Status('Google Cloud repo: %s' % repo) + utils.LogStatus('Bootstrap_vz manifest: %s' % bvz_manifest) + utils.LogStatus('Bootstrap_vz version: %s' % bvz_version) + utils.LogStatus('Google Cloud repo: %s' % repo) # Download and setup bootstrap_vz. bvz_url = 'https://github.com/andsens/bootstrap-vz/archive/%s.zip' bvz_url %= bvz_version bvz_zip_dir = 'bvz_zip' - utils.Status('Downloading bootstrap-vz at commit %s' % bvz_version) + utils.LogStatus('Downloading bootstrap-vz at commit %s' % bvz_version) urllib.urlretrieve(bvz_url, 'bvz.zip') with zipfile.ZipFile('bvz.zip', 'r') as z: z.extractall(bvz_zip_dir) - utils.Status('Downloaded and extracted %s to bvz.zip.' % bvz_url) + utils.LogStatus('Downloaded and extracted %s to bvz.zip.' % bvz_url) bvz_zip_contents = [d for d in os.listdir(bvz_zip_dir)] bvz_zip_subdir = os.path.join(bvz_zip_dir, bvz_zip_contents[0]) utils.Execute(['mv', bvz_zip_subdir, BVZ_DIR]) - utils.Status('Moved bootstrap_vz from %s to %s.' 
% (bvz_zip_subdir, BVZ_DIR)) + utils.LogStatus('Moved bootstrap_vz from %s to %s.' % (bvz_zip_subdir, BVZ_DIR)) bvz_bin = os.path.join(BVZ_DIR, 'bootstrap-vz') utils.MakeExecutable(bvz_bin) - utils.Status('Made %s executable.' % bvz_bin) + utils.LogStatus('Made %s executable.' % bvz_bin) bvz_manifest_file = os.path.join(BVZ_DIR, 'manifests', bvz_manifest) # Inject Google Cloud test repo plugin if using staging or unstable repos. # This is used to test new package releases in images. if repo != 'stable': - utils.Status('Adding Google Cloud test repos plugin for bootstrapvz.') + utils.LogStatus('Adding Google Cloud test repos plugin for bootstrapvz.') repo_plugin_dir = '/build_files/google_cloud_test_repos' bvz_plugins = os.path.join(BVZ_DIR, 'bootstrapvz', 'plugins') shutil.move(repo_plugin_dir, bvz_plugins) @@ -96,33 +99,34 @@ def main(): # Run bootstrap_vz build. cmd = [bvz_bin, '--debug', bvz_manifest_file] - utils.Status('Starting build in %s with params: %s' % (BVZ_DIR, str(cmd))) + utils.LogStatus('Starting build in %s with params: %s' % (BVZ_DIR, str(cmd))) utils.Execute(cmd, cwd=BVZ_DIR) # Upload tar. image_tar_gz = '/target/disk.tar.gz' if os.path.exists(image_tar_gz): - utils.Status('Saving %s to %s' % (image_tar_gz, image_dest)) + utils.LogStatus('Saving %s to %s' % (image_tar_gz, image_dest)) utils.Gsutil(['cp', image_tar_gz, image_dest]) # Create and upload the synopsis of the image. - utils.Status('Creating image synopsis.') + utils.LogStatus('Creating image synopsis.') synopsis = {} packages = collections.OrderedDict() - _, output, _ = utils.Execute(['dpkg-query', '-W'], capture_output=True) + _, output = utils.Execute(['dpkg-query', '-W'], capture_output=True) for line in output.split('\n')[:-1]: # Last line is an empty line. 
parts = line.split() packages[parts[0]] = parts[1] synopsis['installed_packages'] = packages with open('/tmp/synopsis.json', 'w') as f: f.write(json.dumps(synopsis)) - utils.Status('Uploading image synopsis.') + utils.LogStatus('Uploading image synopsis.') synopsis_dest = os.path.join(outs_path, 'synopsis.json') utils.Gsutil(['cp', '/tmp/synopsis.json', synopsis_dest]) + if __name__ == '__main__': try: main() - utils.Success('Debian build was successful!') + utils.LogSuccess('Debian build was successful!') except: - utils.Fail('Debian build failed!') + utils.LogFail('Debian build failed!') diff --git a/daisy_workflows/image_build/debian/debian.wf.json b/daisy_workflows/image_build/debian/debian.wf.json index 70101e1057..8bcee31b6b 100644 --- a/daisy_workflows/image_build/debian/debian.wf.json +++ b/daisy_workflows/image_build/debian/debian.wf.json @@ -13,9 +13,9 @@ }, "Sources": { "build_files/build.py": "./build.py", - "build_files/utils.py": "../linux_common/utils.py", + "build_files/utils.py": "../../linux_common/utils.py", "build_files/google_cloud_test_repos": "./google_cloud_test_repos/", - "startup_script": "../linux_common/bootstrap.py" + "startup_script": "../../linux_common/bootstrap.py" }, "Steps": { "setup": { @@ -37,8 +37,9 @@ "Metadata": { "bootstrap_vz_manifest": "${bootstrap_vz_manifest}", "bootstrap_vz_version": "${bootstrap_vz_version}", - "build_files_gcs_dir": "${SOURCESPATH}/build_files", - "build_script": "build.py", + "files_gcs_dir": "${SOURCESPATH}/build_files", + "script": "build.py", + "prefix": "Build", "google_cloud_repo": "${google_cloud_repo}", "image_dest": "${image_dest}" }, diff --git a/daisy_workflows/image_build/enterprise_linux/build_installer.py b/daisy_workflows/image_build/enterprise_linux/build_installer.py index 5183daf521..96c927404b 100644 --- a/daisy_workflows/image_build/enterprise_linux/build_installer.py +++ b/daisy_workflows/image_build/enterprise_linux/build_installer.py @@ -43,9 +43,9 @@ def main(): sap_apps = 
utils.GetMetadataParam('rhel_sap_apps', raise_on_not_found=False) sap_apps = sap_apps == 'true' - utils.Status('EL Release: %s' % release) - utils.Status('Google Cloud repo: %s' % repo) - utils.Status('Build working directory: %s' % os.getcwd()) + utils.LogStatus('EL Release: %s' % release) + utils.LogStatus('Google Cloud repo: %s' % repo) + utils.LogStatus('Build working directory: %s' % os.getcwd()) iso_file = 'installer.iso' @@ -59,7 +59,7 @@ def main(): # Write the installer disk. Write extlinux MBR, create partition, # copy installer ISO and ISO boot files over. - utils.Status('Writing installer disk.') + utils.LogStatus('Writing installer disk.') utils.Execute(['parted', '/dev/sdb', 'mklabel', 'msdos']) utils.Execute(['sync']) utils.Execute(['parted', '/dev/sdb', 'mkpart', 'primary', '1MB', '100%']) @@ -105,7 +105,7 @@ def main(): # Print out a the modifications. diff = difflib.Differ().compare(oldcfg.splitlines(1), cfg.splitlines(1)) - utils.Status('Modified extlinux.conf:\n%s' % '\n'.join(diff)) + utils.LogStatus('Modified extlinux.conf:\n%s' % '\n'.join(diff)) f.seek(0) f.write(cfg) @@ -118,6 +118,6 @@ def main(): if __name__ == '__main__': try: main() - utils.Success('EL Installer build successful!') + utils.LogSuccess('EL Installer build successful!') except: - utils.Fail('EL Installer build failed!') + utils.LogFail('EL Installer build failed!') diff --git a/daisy_workflows/image_build/enterprise_linux/enterprise_linux.wf.json b/daisy_workflows/image_build/enterprise_linux/enterprise_linux.wf.json index ede7f15dfb..96d3a9b181 100644 --- a/daisy_workflows/image_build/enterprise_linux/enterprise_linux.wf.json +++ b/daisy_workflows/image_build/enterprise_linux/enterprise_linux.wf.json @@ -38,11 +38,11 @@ "Sources": { "build_files/build_installer.py": "./build_installer.py", "build_files/installer.iso": "${installer_iso}", - "build_files/utils.py": "../linux_common/utils.py", + "build_files/utils.py": "../../linux_common/utils.py", "build_files/kickstart": 
"./kickstart/", "build_files/ks_helpers.py": "./ks_helpers.py", "build_files/save_logs.py": "./save_logs.py", - "installerprep_startup_script": "../linux_common/bootstrap.py" + "installerprep_startup_script": "../../linux_common/bootstrap.py" }, "Steps": { "setup-disks": { @@ -72,8 +72,9 @@ "Disks": [{"Source": "disk-installerprep"}, {"Source": "disk-installer"}], "MachineType": "n1-standard-4", "Metadata": { - "build_files_gcs_dir": "${SOURCESPATH}/build_files", - "build_script": "build_installer.py", + "files_gcs_dir": "${SOURCESPATH}/build_files", + "script": "build_installer.py", + "prefix": "Build", "el_release": "${el_release}", "el_savelogs": "${el_savelogs}", "google_cloud_repo": "${google_cloud_repo}", @@ -129,8 +130,8 @@ "Disks": [{"Source": "disk-installerprep"}, {"Source": "disk-installer"}], "MachineType": "n1-standard-1", "Metadata": { - "build_files_gcs_dir": "${SOURCESPATH}/build_files", - "build_script": "save_logs.py" + "files_gcs_dir": "${SOURCESPATH}/build_files", + "script": "save_logs.py" }, "Scopes": ["https://www.googleapis.com/auth/devstorage.read_write"], "StartupScript": "installerprep_startup_script" diff --git a/daisy_workflows/image_build/enterprise_linux/ks_helpers.py b/daisy_workflows/image_build/enterprise_linux/ks_helpers.py index 44f40c96a7..867e10d70e 100644 --- a/daisy_workflows/image_build/enterprise_linux/ks_helpers.py +++ b/daisy_workflows/image_build/enterprise_linux/ks_helpers.py @@ -184,11 +184,11 @@ def BuildKsConfig(release, google_cloud_repo, byol, sap_hana, sap_apps): ks_packages = FetchConfigPart('common-packages.cfg') # For BYOL RHEL, don't remove subscription-manager. 
if byol: - utils.Status('Building RHEL BYOL image.') + utils.LogStatus('Building RHEL BYOL image.') rhel_byol_post = FetchConfigPart('rhel-byol-post.cfg') if release == 'rhel6': - utils.Status('Building RHEL 6 image.') + utils.LogStatus('Building RHEL 6 image.') ks_options = FetchConfigPart('el6-options.cfg') custom_post = FetchConfigPart('el6-post.cfg') if byol: @@ -196,45 +196,45 @@ def BuildKsConfig(release, google_cloud_repo, byol, sap_hana, sap_apps): cleanup = FetchConfigPart('el6-cleanup.cfg') repo_version = 'el6' elif release == "centos6": - utils.Status('Building CentOS 6 image.') + utils.LogStatus('Building CentOS 6 image.') ks_options = FetchConfigPart('el6-options.cfg') custom_post = FetchConfigPart('co6-post.cfg') cleanup = FetchConfigPart('el6-cleanup.cfg') repo_version = 'el6' elif release == "rhel7": - utils.Status('Building RHEL 7 image.') + utils.LogStatus('Building RHEL 7 image.') ks_options = FetchConfigPart('el7-options.cfg') custom_post = FetchConfigPart('el7-post.cfg') if byol: custom_post = '\n'.join([custom_post, rhel_byol_post]) elif sap_hana: - utils.Status('Building RHEL 7 for SAP Hana') + utils.LogStatus('Building RHEL 7 for SAP Hana') custom_post = FetchConfigPart('rhel7-sap-hana-post.cfg') elif sap_apps: - utils.Status('Building RHEL 7 for SAP Apps') + utils.LogStatus('Building RHEL 7 for SAP Apps') custom_post = FetchConfigPart('rhel7-sap-apps-post.cfg') cleanup = FetchConfigPart('el7-cleanup.cfg') repo_version = 'el7' elif release == "centos7": - utils.Status('Building CentOS 7 image.') + utils.LogStatus('Building CentOS 7 image.') ks_options = FetchConfigPart('el7-options.cfg') custom_post = FetchConfigPart('co7-post.cfg') cleanup = FetchConfigPart('el7-cleanup.cfg') repo_version = 'el7' elif release == "oraclelinux6": - utils.Status('Building Oracle Linux 6 image.') + utils.LogStatus('Building Oracle Linux 6 image.') ks_options = FetchConfigPart('el6-options.cfg') custom_post = FetchConfigPart('ol6-post.cfg') cleanup = 
FetchConfigPart('el6-cleanup.cfg') repo_version = 'el6' elif release == "oraclelinux7": - utils.Status('Building Oracle Linux 7 image.') + utils.LogStatus('Building Oracle Linux 7 image.') ks_options = FetchConfigPart('el7-options.cfg') custom_post = FetchConfigPart('ol7-post.cfg') cleanup = FetchConfigPart('el7-cleanup.cfg') repo_version = 'el7' else: - utils.Fail('Unknown Image Name: %s' % release) + utils.LogFail('Unknown Image Name: %s' % release) ks_post = BuildPost(custom_post, cleanup, repo_version, google_cloud_repo) diff --git a/daisy_workflows/image_build/enterprise_linux/save_logs.py b/daisy_workflows/image_build/enterprise_linux/save_logs.py index 2c84b3908d..a08c019c95 100644 --- a/daisy_workflows/image_build/enterprise_linux/save_logs.py +++ b/daisy_workflows/image_build/enterprise_linux/save_logs.py @@ -27,8 +27,8 @@ def main(): # Mount the installer disk. utils.Execute(['mount', '-t', 'ext4', '/dev/sdb1', '/mnt']) - utils.Status('Installer root: %s' % os.listdir('/mnt')) - utils.Status('Build logs: %s' % os.listdir('/mnt/build-logs')) + utils.LogStatus('Installer root: %s' % os.listdir('/mnt')) + utils.LogStatus('Build logs: %s' % os.listdir('/mnt/build-logs')) # For some reason we need to remove the gsutil credentials. utils.Execute(['rm', '-Rf', '/root/.gsutil']) @@ -42,6 +42,6 @@ def main(): if __name__ == '__main__': try: main() - utils.Success('Build logs successfully saved.') + utils.LogSuccess('Build logs successfully saved.') except: - utils.Fail('Failed to save build logs.') + utils.LogFail('Failed to save build logs.') diff --git a/daisy_workflows/image_build/linux_common/bootstrap.py b/daisy_workflows/image_build/linux_common/bootstrap.py deleted file mode 100644 index d003a20559..0000000000 --- a/daisy_workflows/image_build/linux_common/bootstrap.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/python -# Copyright 2017 Google Inc. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Bootstrapper for running a VM script. - -Args: -build-files-gcs-dir: The Cloud Storage location containing the build files. - This dir of build files must contain a build.py containing the build logic. -""" -import logging -import os -import subprocess -import sys -import urllib2 - - -BUILD_DIR = '/build_files' - - -def GetMetadataAttribute(attribute): - url = 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s' % attribute - request = urllib2.Request(url) - request.add_unredirected_header('Metadata-Flavor', 'Google') - return urllib2.urlopen(request).read() - - -def ExecuteScript(script): - """Runs a script and logs the output.""" - process = subprocess.Popen(script, shell=True, executable='/bin/bash', - cwd=BUILD_DIR, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE) - while True: - for line in iter(process.stdout.readline, b''): - message = line.decode('utf-8', 'replace').rstrip('\n') - if message: - logging.info(message) - if process.poll() is not None: - break - logging.info('BuildStatus: %s: return code %s', script, process.returncode) - - -def Bootstrap(): - """Get build files, run build, poweroff.""" - try: - logging.info('BuildStatus: Starting bootstrap.py.') - build_gcs_dir = GetMetadataAttribute('build_files_gcs_dir') - build_script = GetMetadataAttribute('build_script') - full_build_script = os.path.join(BUILD_DIR, build_script) - subprocess.call(['mkdir', BUILD_DIR]) - 
subprocess.call( - ['gsutil', '-m', 'cp', '-r', os.path.join(build_gcs_dir, '*'), BUILD_DIR]) - logging.info('BuildStatus: Making build script %s executable.', full_build_script) - subprocess.call(['chmod', '+x', build_script], cwd=BUILD_DIR) - logging.info('BuildStatus: Running %s.', full_build_script) - ExecuteScript(full_build_script) - except: - logging.error('BuildFailed: Cannot run %s.', full_build_script) - - -if __name__ == '__main__': - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - stdout = logging.StreamHandler(sys.stdout) - stdout.setLevel(logging.DEBUG) - logger.addHandler(stdout) - Bootstrap() diff --git a/daisy_workflows/image_build/linux_common/utils.py b/daisy_workflows/image_build/linux_common/utils.py deleted file mode 100644 index 6c20a8e392..0000000000 --- a/daisy_workflows/image_build/linux_common/utils.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/python -# Copyright 2017 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Utility functions for all VM scripts.""" - -import logging -import os -import stat -import subprocess -import sys -import urllib2 - - -def Fail(message): - logging.error('BuildFailed: %s', message) - - -def Status(message): - logging.info('BuildStatus: %s', message) - - -def Success(message): - logging.info('BuildSuccess: %s', message) - - -def YumInstall(package_list): - if YumInstall.first_run: - Execute(['yum', 'update']) - YumInstall.first_run = False - Execute(['yum', '-y', 'install'] + package_list) -YumInstall.first_run = True - - -def AptGetInstall(package_list): - if AptGetInstall.first_run: - try: - Execute(['apt-get', 'update']) - except subprocess.CalledProcessError as error: - Status('Apt update failed, trying again: %s' % error) - Execute(['apt-get', 'update'], raise_errors=False) - AptGetInstall.first_run = False - - env = os.environ.copy() - env['DEBIAN_FRONTEND'] = 'noninteractive' - return Execute(['apt-get', '-q', '-y', 'install'] + package_list, env=env) -AptGetInstall.first_run = True - - -def PipInstall(package_list): - """Install Python modules via pip. 
Assumes pip is already installed.""" - return Execute(['pip', 'install', '-U'] + package_list) - - -def Gsutil(params): - """Call gsutil.""" - env = os.environ.copy() - return Execute(['gsutil', '-m'] + params, capture_output=True, env=env) - - -def Execute(cmd, cwd=None, capture_output=False, env=None, raise_errors=True): - """Execute an external command (wrapper for Python subprocess).""" - Status('Executing command: %s' % str(cmd)) - returncode = 0 - output = None - try: - if capture_output: - output = subprocess.check_output(cmd, cwd=cwd, env=env) - else: - subprocess.check_call(cmd, cwd=cwd, env=env) - except subprocess.CalledProcessError as e: - if raise_errors: - raise - else: - returncode = e.returncode - output = e.output - Status('Command returned error status %d' % returncode) - if output: - Status(output) - return returncode, output, None - - -def HttpGet(url, headers=None): - request = urllib2.Request(url) - if headers: - for key in headers.keys(): - request.add_unredirected_header(key, headers[key]) - return urllib2.urlopen(request).read() - - -def GetMetadataParam(name, default_value=None, raise_on_not_found=False): - try: - url = 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s' % name - return HttpGet(url, headers={'Metadata-Flavor': 'Google'}) - except urllib2.HTTPError: - if raise_on_not_found: - raise ValueError('Metadata key "%s" not found' % name) - else: - return default_value - - -def GetMetadataParamBool(name, default_value): - value = GetMetadataParam(name, default_value) - if not value: - return False - return True if value.lower() == 'yes' else False - - -def MakeExecutable(file_path): - os.chmod(file_path, os.stat(file_path).st_mode | stat.S_IEXEC) - - -def ReadFile(file_path, strip=False): - content = open(file_path).read() - if strip: - return content.strip() - return content - - -def WriteFile(file_path, content, mode='w'): - with open(file_path, mode) as fp: - fp.write(content) - - -def SetupLogging(): - 
logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - stdout = logging.StreamHandler(sys.stdout) - stdout.setLevel(logging.DEBUG) - logger.addHandler(stdout) - - -SetupLogging() diff --git a/daisy_workflows/image_import/debian/translate_debian.wf.json b/daisy_workflows/image_import/debian/translate_debian.wf.json index 60ab772dca..e369c50d5a 100644 --- a/daisy_workflows/image_import/debian/translate_debian.wf.json +++ b/daisy_workflows/image_import/debian/translate_debian.wf.json @@ -16,8 +16,8 @@ }, "Sources": { "import_files/translate.py": "./translate.py", - "import_files/utils.py": "../linux_common/utils.py", - "startup_script": "../linux_common/bootstrap.py" + "import_files/utils.py": "../../linux_common/utils.py", + "startup_script": "../../linux_common/bootstrap.py" }, "Steps": { "setup-disk": { @@ -40,8 +40,9 @@ ], "MachineType": "n1-standard-2", "Metadata": { - "import_files_gcs_dir": "${SOURCESPATH}/import_files", - "import_script": "translate.py", + "files_gcs_dir": "${SOURCESPATH}/import_files", + "script": "translate.py", + "prefix": "Translate", "debian_release": "${debian_release}", "install_gce_packages": "${install_gce_packages}" }, diff --git a/daisy_workflows/image_import/enterprise_linux/translate_el.wf.json b/daisy_workflows/image_import/enterprise_linux/translate_el.wf.json index 47c252b300..10f19d2523 100644 --- a/daisy_workflows/image_import/enterprise_linux/translate_el.wf.json +++ b/daisy_workflows/image_import/enterprise_linux/translate_el.wf.json @@ -24,8 +24,8 @@ }, "Sources": { "import_files/translate.py": "./translate.py", - "import_files/utils.py": "../linux_common/utils.py", - "startup_script": "../linux_common/bootstrap.py" + "import_files/utils.py": "../../linux_common/utils.py", + "startup_script": "../../linux_common/bootstrap.py" }, "Steps": { "translate-disk-inst": { @@ -38,8 +38,9 @@ ], "MachineType": "n1-standard-2", "Metadata": { - "import_files_gcs_dir": "${SOURCESPATH}/import_files", - "import_script": 
"translate.py", + "files_gcs_dir": "${SOURCESPATH}/import_files", + "script": "translate.py", + "prefix": "Translate", "el_release": "${el_release}", "install_gce_packages": "${install_gce_packages}", "use_rhel_gce_license": "${use_rhel_gce_license}" diff --git a/daisy_workflows/image_import/linux_common/bootstrap.py b/daisy_workflows/image_import/linux_common/bootstrap.py deleted file mode 100644 index 9b79ad659e..0000000000 --- a/daisy_workflows/image_import/linux_common/bootstrap.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/python -# Copyright 2017 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Bootstrapper for running a VM script. - -Args: -import-files-gcs-dir: The Cloud Storage location containing the import files. - This dir of import files must contain a import.py containing the import logic. 
-""" -import base64 -import logging -import os -import subprocess -import urllib2 - - -def GetMetadataAttribute(attribute): - url = 'http://metadata/computeMetadata/v1/instance/attributes/%s' % attribute - request = urllib2.Request(url) - request.add_unredirected_header('Metadata-Flavor', 'Google') - return urllib2.urlopen(request).read() - - -def Bootstrap(): - """Get import files, run import.""" - try: - logging.info('Starting bootstrap.py.') - import_gcs_dir = GetMetadataAttribute('import_files_gcs_dir') - import_script = GetMetadataAttribute('import_script') - import_dir = '/import_files' - full_import_script = os.path.join(import_dir, import_script) - subprocess.check_call(['mkdir', import_dir]) - subprocess.check_call( - ['gsutil', '-m', 'cp', '-r', os.path.join(import_gcs_dir, '*'), - import_dir]) - logging.info('Making import script %s executable.', full_import_script) - subprocess.check_call(['chmod', '+x', import_script], cwd=import_dir) - logging.info('Running %s.', full_import_script) - subprocess.check_call([full_import_script], cwd=import_dir) - except Exception as e: - logging.error('TranslateFailed: error: %s', str(e)) - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - Bootstrap() diff --git a/daisy_workflows/image_import/linux_common/utils.py b/daisy_workflows/image_import/linux_common/utils.py deleted file mode 100644 index f9dcd563bc..0000000000 --- a/daisy_workflows/image_import/linux_common/utils.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/python -# Copyright 2017 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Utility functions for all VM scripts.""" - -import logging -import os -import subprocess -import sys -import trace -import urllib2 - - -def AptGetInstall(package_list): - if AptGetInstall.first_run: - try: - Execute(['apt-get', 'update']) - except subprocess.CalledProcessError as error: - logging.warning('Apt update failed, trying again: %s', error) - Execute(['apt-get', 'update'], raise_errors=False) - AptGetInstall.first_run = False - - env = os.environ.copy() - env['DEBIAN_FRONTEND'] = 'noninteractive' - return Execute(['apt-get', '-q', '-y', 'install'] + package_list, env=env) -AptGetInstall.first_run = True - - -def Execute(cmd, cwd=None, capture_output=False, env=None, raise_errors=True): - """Execute an external command (wrapper for Python subprocess).""" - logging.info('Command: %s', str(cmd)) - returncode = 0 - output = None - try: - if capture_output: - output = subprocess.check_output(cmd, cwd=cwd, env=env) - else: - subprocess.check_call(cmd, cwd=cwd, env=env) - except subprocess.CalledProcessError as e: - if raise_errors: - raise - else: - returncode = e.returncode - output = e.output - logging.exception('Command returned error status %d', returncode) - if output: - logging.info(output) - return returncode, output, None - - -def HttpGet(url, headers=None): - request = urllib2.Request(url) - if headers: - for key in headers.keys(): - request.add_unredirected_header(key, headers[key]) - return urllib2.urlopen(request).read() - - -def GetMetadataParam(name, default_value=None, raise_on_not_found=False): - try: - url = 
'http://metadata/computeMetadata/v1/instance/attributes/%s' % name - return HttpGet(url, headers={'Metadata-Flavor': 'Google'}) - except urllib2.HTTPError: - if raise_on_not_found: - raise ValueError('Metadata key "%s" not found' % name) - else: - return default_value - - -def MountDisk(disk): - # Note: guestfs is not imported in the beginning of the file as it might not be - # installed when this module is loaded - import guestfs - - # All new Python code should pass python_return_dict=True - # to the constructor. It indicates that your program wants - # to receive Python dicts for methods in the API that return - # hashtables. - g = guestfs.GuestFS(python_return_dict=True) - # Set the product name as cloud-init checks it to confirm this is a VM in GCE - g.config('-smbios', 'type=1,product=Google Compute Engine') - g.set_verbose(1) - g.set_trace(1) - - g.set_memsize(4096) - - # Enable network - g.set_network(True) - - # Attach the disk image to libguestfs. - g.add_drive_opts(disk) - - # Run the libguestfs back-end. - g.launch() - - # Ask libguestfs to inspect for operating systems. - roots = g.inspect_os() - if len(roots) == 0: - raise Exception('inspect_vm: no operating systems found') - - # Sort keys by length, shortest first, so that we end up - # mounting the filesystems in the correct order. - mps = g.inspect_get_mountpoints(roots[0]) - def compare(a, b): - return len(a) - len(b) - - for device in sorted(mps.keys(), compare): - try: - g.mount(mps[device], device) - except RuntimeError as msg: - logging.warn('%s (ignored)' % msg) - - return g - - -def UnmountDisk(g): - try: - g.umount_all() - except Exception as e: - logging.debug(str(e)) - logging.warn('Unmount failed. Continuing anyway.') - - -def CommonRoutines(g): - # Remove udev file to force it to be re-generated - logging.info("Removing udev 70-persistent-net.rules.") - g.rm_f('/etc/udev/rules.d/70-persistent-net.rules') - - # Remove SSH host keys. 
- logging.info("Removing SSH host keys.") - g.sh("rm -f /etc/ssh/ssh_host_*") - - -def RunTranslate(translate_func): - try: - tracer = trace.Trace( - ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0) - tracer.runfunc(translate_func) - logging.info('TranslateSuccess: Translation finished.') - except Exception as e: - logging.error('TranslateFailed: error: %s', str(e)) - - -def SetupLogging(): - logging_level = logging.DEBUG - logging.basicConfig(level=logging_level) - console = logging.StreamHandler() - console.setLevel(logging_level) - logging.getLogger().addHandler(console) - -SetupLogging() diff --git a/daisy_workflows/image_import/ubuntu/translate_ubuntu.wf.json b/daisy_workflows/image_import/ubuntu/translate_ubuntu.wf.json index 70ca2e363b..311ff4fdb0 100644 --- a/daisy_workflows/image_import/ubuntu/translate_ubuntu.wf.json +++ b/daisy_workflows/image_import/ubuntu/translate_ubuntu.wf.json @@ -16,8 +16,8 @@ }, "Sources": { "import_files/translate.py": "./translate.py", - "import_files/utils.py": "../linux_common/utils.py", - "startup_script": "../linux_common/bootstrap.py" + "import_files/utils.py": "../../linux_common/utils.py", + "startup_script": "../../linux_common/bootstrap.py" }, "Steps": { "setup-disk": { @@ -40,8 +40,9 @@ ], "MachineType": "n1-standard-2", "Metadata": { - "import_files_gcs_dir": "${SOURCESPATH}/import_files", - "import_script": "translate.py", + "files_gcs_dir": "${SOURCESPATH}/import_files", + "script": "translate.py", + "prefix": "Translate", "ubuntu_release": "${ubuntu_release}", "install_gce_packages": "${install_gce_packages}" }, diff --git a/daisy_workflows/image_test/configuration/configuration.wf.json b/daisy_workflows/image_test/configuration/configuration.wf.json index 3b8a975493..c8d2ae2653 100644 --- a/daisy_workflows/image_test/configuration/configuration.wf.json +++ b/daisy_workflows/image_test/configuration/configuration.wf.json @@ -4,17 +4,10 @@ "source_image": {"Required": true, "Description": "Image to be 
tested"} }, "Sources": { - "bootstrap": "../linux_common/bootstrap.py", - "test_files/centos.py": "./centos.py", - "test_files/configuration-test.py": "./configuration-test.py", - "test_files/debian.py": "./debian.py", - "test_files/freebsd.py": "./freebsd.py", - "test_files/generic_distro.py": "./generic_distro.py", - "test_files/redhat.py": "./redhat.py", - "test_files/suse.py": "./suse.py", + "bootstrap": "../../linux_common/bootstrap.py", + "test_files/": "./", "test_files/test.py": "./configuration-test.py", - "test_files/ubuntu.py": "./ubuntu.py", - "test_files/utils.py": "../linux_common/utils.py" + "test_files/utils.py": "../../linux_common/utils.py" }, "Steps": { "create-disk": { @@ -35,8 +28,9 @@ "StartupScript": "bootstrap", "Metadata": { "instance_name": "inst-configuration-${DATETIME}-${ID}", - "test_files_gcs_dir": "${SOURCESPATH}/test_files", - "test_script": "test.py", + "files_gcs_dir": "${SOURCESPATH}/test_files", + "script": "test.py", + "prefix": "Test", "zone": "${ZONE}", "project": "${PROJECT}" }, diff --git a/daisy_workflows/image_test/disk/disk.wf.json b/daisy_workflows/image_test/disk/disk.wf.json index 43de65dbf9..2912a0a84c 100644 --- a/daisy_workflows/image_test/disk/disk.wf.json +++ b/daisy_workflows/image_test/disk/disk.wf.json @@ -5,8 +5,8 @@ }, "Sources": { "test_files/test.py": "./disk-tester.py", - "test_files/utils.py": "../linux_common/utils.py", - "startup_tester": "../linux_common/bootstrap.py", + "test_files/utils.py": "../../linux_common/utils.py", + "startup_tester": "../../linux_common/bootstrap.py", "disk-testee.sh": "./disk-testee.sh", "disk-local-ssd.sh": "./disk-local-ssd.sh" }, @@ -43,8 +43,9 @@ "Disks": [{"Source": "disk-tester"}], "StartupScript": "startup_tester", "metadata": { - "test_files_gcs_dir": "${SOURCESPATH}/test_files", - "test_script": "test.py", + "files_gcs_dir": "${SOURCESPATH}/test_files", + "script": "test.py", + "prefix": "Test", "testee": "inst-disk-testee-${DATETIME}-${ID}", "testee_disk": 
"disk-testee-${DATETIME}-${ID}", "testee_disk_removable": "disk-testee-removable-${DATETIME}-${ID}", diff --git a/daisy_workflows/image_test/metadata-ssh/metadata-ssh.wf.json b/daisy_workflows/image_test/metadata-ssh/metadata-ssh.wf.json index 32e15dab51..a2611357ee 100644 --- a/daisy_workflows/image_test/metadata-ssh/metadata-ssh.wf.json +++ b/daisy_workflows/image_test/metadata-ssh/metadata-ssh.wf.json @@ -5,8 +5,8 @@ }, "Sources": { "test_files/test.py": "./metadata-ssh-tester.py", - "test_files/utils.py": "../linux_common/utils.py", - "startup_tester": "../linux_common/bootstrap.py" + "test_files/utils.py": "../../linux_common/utils.py", + "startup_tester": "../../linux_common/bootstrap.py" }, "Steps": { "create-disk-tester": { @@ -34,8 +34,9 @@ "Disks": [{"Source": "disk-tester"}], "StartupScript": "startup_tester", "metadata": { - "test_files_gcs_dir": "${SOURCESPATH}/test_files", - "test_script": "test.py", + "files_gcs_dir": "${SOURCESPATH}/test_files", + "script": "test.py", + "prefix": "Test", "testee": "inst-metadata-ssh-testee-${DATETIME}-${ID}", "debian_install_google_api_python_client": "yes", "zone": "${ZONE}", diff --git a/daisy_workflows/image_test/oslogin-ssh/oslogin-ssh.wf.json b/daisy_workflows/image_test/oslogin-ssh/oslogin-ssh.wf.json index f7964eb8ba..eb83e586b4 100644 --- a/daisy_workflows/image_test/oslogin-ssh/oslogin-ssh.wf.json +++ b/daisy_workflows/image_test/oslogin-ssh/oslogin-ssh.wf.json @@ -15,8 +15,8 @@ }, "Sources": { "test_files/test.py": "./oslogin-ssh-master-tester.py", - "test_files/utils.py": "../linux_common/utils.py", - "startup_master_tester": "../linux_common/bootstrap.py", + "test_files/utils.py": "../../linux_common/utils.py", + "startup_master_tester": "../../linux_common/bootstrap.py", "test_files/slave_tester.sh": "./oslogin_slave_tester.sh", "startup_slave_tester": "./oslogin_slave_tester_startup.sh" }, @@ -64,8 +64,9 @@ "Disks": [{"Source": "disk-master-tester"}], "StartupScript": "startup_master_tester", 
"metadata": { - "test_files_gcs_dir": "${SOURCESPATH}/test_files", - "test_script": "test.py", + "files_gcs_dir": "${SOURCESPATH}/test_files", + "script": "test.py", + "prefix": "Test", "testee": "inst-oslogin-ssh-testee-${DATETIME}-${ID}", "osLoginTester": "inst-oslogin-ssh-tester-${DATETIME}-${ID}", "osAdminLoginTester": "inst-osadminlogin-ssh-tester-${DATETIME}-${ID}", diff --git a/daisy_workflows/image_test/linux_common/bootstrap.py b/daisy_workflows/linux_common/bootstrap.py similarity index 55% rename from daisy_workflows/image_test/linux_common/bootstrap.py rename to daisy_workflows/linux_common/bootstrap.py index dfe6a9bf86..0da7463a1b 100644 --- a/daisy_workflows/image_test/linux_common/bootstrap.py +++ b/daisy_workflows/linux_common/bootstrap.py @@ -16,8 +16,10 @@ """Bootstrapper for running a VM script. Args: -test-files-gcs-dir: The Cloud Storage location containing the test files. - This dir of test files must contain a test.py containing the test logic. + files_gcs_dir: The Cloud Storage location containing the files. + This dir will be used to run the 'script' requested by Metadata. 
+ script: The main script to be run + prefix: a string prefix for outputting status """ import logging import os @@ -25,15 +27,18 @@ import urllib2 +DIR = '/files' + + def GetMetadataAttribute(attribute): - url = 'http://metadata/computeMetadata/v1/instance/attributes/%s' % attribute + url = 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s' % attribute request = urllib2.Request(url) request.add_unredirected_header('Metadata-Flavor', 'Google') return urllib2.urlopen(request).read() -def DebianInstallGoogleApiPythonClient(): - logging.info('Installing google-api-python-client') +def DebianInstallGoogleApiPythonClient(prefix): + logging.info('%sStatus: Installing google-api-python-client', prefix) subprocess.check_call(['apt-get', 'update']) env = os.environ.copy() env['DEBIAN_FRONTEND'] = 'noninteractive' @@ -45,31 +50,33 @@ def DebianInstallGoogleApiPythonClient(): def Bootstrap(): - """Get test files, run test.""" + """Get files, run. + """ try: - logging.info('Starting bootstrap.py.') + prefix = GetMetadataAttribute('prefix') + status = prefix + 'Status' + logging.info('%s: Starting bootstrap.py.', status) # Optional flag try: if GetMetadataAttribute('debian_install_google_api_python_client'): - DebianInstallGoogleApiPythonClient() + DebianInstallGoogleApiPythonClient(prefix) except urllib2.HTTPError: pass - test_gcs_dir = GetMetadataAttribute('test_files_gcs_dir') - test_script = GetMetadataAttribute('test_script') - test_dir = '/test_files' - full_test_script = os.path.join(test_dir, test_script) - subprocess.check_call(['mkdir', test_dir]) + gcs_dir = GetMetadataAttribute('files_gcs_dir') + script = GetMetadataAttribute('script') + full_script = os.path.join(DIR, script) + subprocess.check_call(['mkdir', DIR]) subprocess.check_call( - ['gsutil', '-m', 'cp', '-r', os.path.join(test_gcs_dir, '*'), - test_dir]) - logging.info('Making test script %s executable.', full_test_script) - subprocess.check_call(['chmod', '+x', test_script], 
cwd=test_dir) - logging.info('Running %s.', full_test_script) - subprocess.check_call([full_test_script], cwd=test_dir) + ['gsutil', '-m', 'cp', '-r', os.path.join(gcs_dir, '*'), DIR]) + logging.info('%s: Making script %s executable.', status, full_script) + subprocess.check_call(['chmod', '+x', script], cwd=DIR) + logging.info('%s: Running %s.', status, full_script) + subprocess.check_call([full_script], cwd=DIR) except Exception as e: - print('TestFailed: error: ' + str(e)) + fail = prefix + 'Failed' + print('%s: error: %s' % (fail, str(e))) if __name__ == '__main__': diff --git a/daisy_workflows/image_test/linux_common/utils.py b/daisy_workflows/linux_common/utils.py similarity index 74% rename from daisy_workflows/image_test/linux_common/utils.py rename to daisy_workflows/linux_common/utils.py index d580a17df0..f1c9cc6226 100644 --- a/daisy_workflows/image_test/linux_common/utils.py +++ b/daisy_workflows/linux_common/utils.py @@ -19,6 +19,7 @@ import logging import os import re +import stat import subprocess import sys import time @@ -28,12 +29,52 @@ import uuid +def GetPrefix(): + if GetPrefix.first_run: + GetPrefix.prefix = GetMetadataParam('prefix') + GetPrefix.first_run = False + return GetPrefix.prefix + + +GetPrefix.first_run = True + + +def LogFail(*args, **kwargs): + logging.error('%sFailed: %s', GetPrefix(), *args, **kwargs) + + +def LogStatus(*args, **kwargs): + logging.info('%sStatus: %s', GetPrefix(), *args, **kwargs) + + +def LogWarn(*args, **kwargs): + logging.warn('%sWarn: %s', GetPrefix(), *args, **kwargs) + + +def LogDebug(*args, **kwargs): + logging.debug('%sDebug: %s', GetPrefix(), *args, **kwargs) + + +def LogSuccess(*args, **kwargs): + logging.info('%sSuccess: %s', GetPrefix(), *args, **kwargs) + + +def YumInstall(package_list): + if YumInstall.first_run: + Execute(['yum', 'update']) + YumInstall.first_run = False + Execute(['yum', '-y', 'install'] + package_list) + + +YumInstall.first_run = True + + def AptGetInstall(package_list): if 
AptGetInstall.first_run: try: Execute(['apt-get', 'update']) except subprocess.CalledProcessError as error: - logging.warning('Apt update failed, trying again: %s', error) + LogStatus('Apt update failed, trying again: %s' % error) Execute(['apt-get', 'update'], raise_errors=False) AptGetInstall.first_run = False @@ -45,9 +86,20 @@ def AptGetInstall(package_list): AptGetInstall.first_run = True +def PipInstall(package_list): + """Install Python modules via pip. Assumes pip is already installed.""" + return Execute(['pip', 'install', '-U'] + package_list) + + +def Gsutil(params): + """Call gsutil.""" + env = os.environ.copy() + return Execute(['gsutil', '-m'] + params, capture_output=True, env=env) + + def Execute(cmd, cwd=None, capture_output=False, env=None, raise_errors=True): """Execute an external command (wrapper for Python subprocess).""" - logging.info('Command: %s', str(cmd)) + LogStatus('Executing command: %s' % str(cmd)) stdout = subprocess.PIPE if capture_output else None p = subprocess.Popen(cmd, cwd=cwd, env=env, stdout=stdout) output = p.communicate()[0] @@ -57,9 +109,9 @@ def Execute(cmd, cwd=None, capture_output=False, env=None, raise_errors=True): if raise_errors: raise subprocess.CalledProcessError(returncode, cmd) else: - logging.exception('Command returned error status %d', returncode) + LogStatus('Command returned error status %d' % returncode) if output: - logging.info(output) + LogStatus(output) return returncode, output @@ -71,6 +123,115 @@ def HttpGet(url, headers=None): return urllib2.urlopen(request).read() +def GetMetadataParam(name, default_value=None, raise_on_not_found=False): + try: + url = 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/%s' % name + return HttpGet(url, headers={'Metadata-Flavor': 'Google'}) + except urllib2.HTTPError: + if raise_on_not_found: + raise ValueError('Metadata key "%s" not found' % name) + else: + return default_value + + +def MountDisk(disk): + # Note: guestfs is not imported in 
the beginning of the file as it might not + # be installed when this module is loaded + import guestfs + + # All new Python code should pass python_return_dict=True + # to the constructor. It indicates that your program wants + # to receive Python dicts for methods in the API that return + # hashtables. + g = guestfs.GuestFS(python_return_dict=True) + # Set the product name as cloud-init checks it to confirm this is a VM in GCE + g.config('-smbios', 'type=1,product=Google Compute Engine') + g.set_verbose(1) + g.set_trace(1) + + g.set_memsize(4096) + + # Enable network + g.set_network(True) + + # Attach the disk image to libguestfs. + g.add_drive_opts(disk) + + # Run the libguestfs back-end. + g.launch() + + # Ask libguestfs to inspect for operating systems. + roots = g.inspect_os() + if len(roots) == 0: + raise Exception('inspect_vm: no operating systems found') + + # Sort keys by length, shortest first, so that we end up + # mounting the filesystems in the correct order. + mps = g.inspect_get_mountpoints(roots[0]) + + def compare(a, b): + return len(a) - len(b) + + for device in sorted(mps.keys(), compare): + try: + g.mount(mps[device], device) + except RuntimeError as msg: + LogWarn('%s (ignored)' % msg) + + return g + + +def UnmountDisk(g): + try: + g.umount_all() + except Exception as e: + LogDebug(str(e)) + LogWarn('Unmount failed. Continuing anyway.') + + +def CommonRoutines(g): + # Remove udev file to force it to be re-generated + LogStatus("Removing udev 70-persistent-net.rules.") + g.rm_f('/etc/udev/rules.d/70-persistent-net.rules') + + # Remove SSH host keys. 
+ LogStatus("Removing SSH host keys.") + g.sh("rm -f /etc/ssh/ssh_host_*") + + +def RunTranslate(translate_func): + try: + tracer = trace.Trace( + ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0) + tracer.runfunc(translate_func) + LogSuccess('Translation finished.') + except Exception as e: + LogFail('error: %s', str(e)) + + +def GetMetadataParamBool(name, default_value): + value = GetMetadataParam(name, default_value) + if not value: + return False + return True if value.lower() == 'yes' else False + + +def MakeExecutable(file_path): + os.chmod(file_path, os.stat(file_path).st_mode | stat.S_IEXEC) + + +def ReadFile(file_path, strip=False): + content = open(file_path).read() + if strip: + return content.strip() + return content + + +def WriteFile(file_path, content, mode='w'): + with open(file_path, mode) as fp: + fp.write(content) + + def GenSshKey(user): """Generate ssh key for user. @@ -103,14 +264,14 @@ def Wrapper(*args, **kwargs): try: response = func(*args, **kwargs) except Exception as e: - logging.info(str(e)) - logging.info( + LogStatus(str(e)) + LogStatus( 'Function %s failed, waiting %d seconds, retrying %d ...', str(func), wait, ntries) time.sleep(wait) wait = wait * ratio else: - logging.info( + LogStatus( 'Function %s executed in less then %d sec, with %d tentative(s)', str(func), time.time() - start_time, ntries) return response @@ -164,7 +325,7 @@ def GetCompute(discovery, credentials): def RunTest(test_func): - """Run main test function and print TestSuccess or TestFailed. + """Run main test function and print LogSuccess() or LogFail(). Args: test_func: function, the function to be tested. 
@@ -173,19 +334,19 @@ def RunTest(test_func): tracer = trace.Trace( ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0) tracer.runfunc(test_func) - print('TestSuccess: Test finished.') + LogSuccess('Test finished.') except Exception as e: - print('TestFailed: error: ' + str(e)) + LogFail('error: ' + str(e)) traceback.print_exc() def SetupLogging(): """Configure Logging system.""" - logging_level = logging.DEBUG - logging.basicConfig(level=logging_level) - console = logging.StreamHandler() - console.setLevel(logging_level) - logging.getLogger().addHandler(console) + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + stdout = logging.StreamHandler(sys.stdout) + stdout.setLevel(logging.DEBUG) + logger.addHandler(stdout) SetupLogging()