diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..a6a64a60
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+.eggs/
+thoth_common.egg-info/
+
diff --git a/Pipfile b/Pipfile
index 4dd40f70..a9ebc909 100644
--- a/Pipfile
+++ b/Pipfile
@@ -6,6 +6,7 @@ name = "pypi"
[packages]
"rfc5424-logging-handler" = "*"
daiquiri = "*"
+requests = "*"
[dev-packages]
pytest = "*"
diff --git a/Pipfile.lock b/Pipfile.lock
index d833d234..fbc7f8ce 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "0ef11bbb1e7c911848ece01156edd2fa804b6f7d8bd549e0f63a8eab58ead402"
+ "sha256": "c377132881d98ee3a5feab3bf0ffd657b8805897f62aba73c262258f190bae06"
},
"pipfile-spec": 6,
"requires": {},
@@ -14,6 +14,20 @@
]
},
"default": {
+ "certifi": {
+ "hashes": [
+ "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7",
+ "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0"
+ ],
+ "version": "==2018.4.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
"daiquiri": {
"hashes": [
"sha256:8832f28e110165b905993b4bdab638a3c245f5671d5976f226f2628e7d2e7862",
@@ -22,6 +36,13 @@
"index": "pypi",
"version": "==1.5.0"
},
+ "idna": {
+ "hashes": [
+ "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
+ "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
+ ],
+ "version": "==2.7"
+ },
"pytz": {
"hashes": [
"sha256:a061aa0a9e06881eb8b3b2b43f05b9439d6583c206d0a6c340ff72a7b6669053",
@@ -29,6 +50,14 @@
],
"version": "==2018.5"
},
+ "requests": {
+ "hashes": [
+ "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1",
+ "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a"
+ ],
+ "index": "pypi",
+ "version": "==2.19.1"
+ },
"rfc5424-logging-handler": {
"hashes": [
"sha256:0fa181ba9ef4b517c938c90608f4c3a5c1abf3ea29250f0df294bef439ba2c9e",
@@ -42,6 +71,14 @@
"sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"
],
"version": "==1.5.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf",
+ "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5"
+ ],
+ "markers": "python_version >= '2.6' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version < '4' and python_version != '3.1.*'",
+ "version": "==1.23"
}
},
"develop": {
@@ -149,7 +186,7 @@
"sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6",
"sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80"
],
- "markers": "python_version < '4' and python_version != '3.1.*' and python_version >= '2.6' and python_version != '3.0.*' and python_version != '3.2.*'",
+ "markers": "python_version >= '2.6' and python_version != '3.2.*' and python_version < '4' and python_version != '3.0.*' and python_version != '3.1.*'",
"version": "==4.5.1"
},
"dependency-management": {
@@ -172,7 +209,7 @@
"sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
"sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"
],
- "markers": "python_version >= '2.7' and python_version != '3.1.*' and python_version != '3.2.*' and python_version != '3.0.*' and python_version != '3.3.*'",
+ "markers": "python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.7'",
"version": "==4.3.4"
},
"lazy-object-proxy": {
@@ -235,7 +272,7 @@
"sha256:6e3836e39f4d36ae72840833db137f7b7d35105079aee6ec4a62d9f80d594dd1",
"sha256:95eb8364a4708392bae89035f45341871286a333f749c3141c20573d2b3876e1"
],
- "markers": "python_version != '3.3.*' and python_version != '3.1.*' and python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.2.*'",
+ "markers": "python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.7'",
"version": "==0.7.1"
},
"py": {
@@ -243,7 +280,7 @@
"sha256:3fd59af7435864e1a243790d322d763925431213b6b8529c6ca71081ace3bbf7",
"sha256:e31fb2767eb657cbde86c454f02e99cb846d3cd9d61b318525140214fdc0e98e"
],
- "markers": "python_version != '3.3.*' and python_version != '3.1.*' and python_version >= '2.7' and python_version != '3.0.*' and python_version != '3.2.*'",
+ "markers": "python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version != '3.1.*' and python_version >= '2.7'",
"version": "==1.5.4"
},
"pycodestyle": {
@@ -341,6 +378,7 @@
"sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1",
"sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a"
],
+ "index": "pypi",
"version": "==2.19.1"
},
"sarge": {
@@ -417,7 +455,7 @@
"sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf",
"sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5"
],
- "markers": "python_version >= '2.6' and python_version != '3.2.*' and python_version < '4' and python_version != '3.0.*' and python_version != '3.3.*' and python_version != '3.1.*'",
+ "markers": "python_version >= '2.6' and python_version != '3.2.*' and python_version != '3.3.*' and python_version != '3.0.*' and python_version < '4' and python_version != '3.1.*'",
"version": "==1.23"
},
"wrapt": {
diff --git a/requirements.txt b/requirements.txt
index b9c90ccc..2b0f73d4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
rfc5424-logging-handler
daiquiri
+requests
diff --git a/setup.py b/setup.py
index 48271695..39e6cd37 100644
--- a/setup.py
+++ b/setup.py
@@ -48,6 +48,9 @@ def get_version():
packages=[
'thoth.common',
],
+ extras_require={
+ 'openshift': ['openshift', 'kubernetes']
+ },
zip_safe=False,
install_requires=get_install_requires()
)
diff --git a/thoth/common/__init__.py b/thoth/common/__init__.py
index 76c87e44..ac913af5 100644
--- a/thoth/common/__init__.py
+++ b/thoth/common/__init__.py
@@ -26,6 +26,7 @@
from .json import SafeJSONEncoder
from .logging import init_logging
from .logging import logger_setup
+from .openshift import OpenShift
__name__ = 'thoth-common'
__version__ = "0.2.2"
diff --git a/thoth/common/exceptions.py b/thoth/common/exceptions.py
new file mode 100644
index 00000000..424bc6a5
--- /dev/null
+++ b/thoth/common/exceptions.py
@@ -0,0 +1,29 @@
+# thoth-common
+# Copyright(C) 2018 Fridolin Pokorny
+#
+# This program is free software: you can redistribute it and / or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Exceptions used within thoth-common package."""
+
+
+class ThothCommonException(Exception):
+ """A base class for Thoth-common exception hierarchy."""
+
+
+class NotFoundException(ThothCommonException):
+ """Raised if the given resource cannot be found."""
+
+
+class ConfigurationError(ThothCommonException):
+ """Raised on miss-configuration issues."""
diff --git a/thoth/common/logging.py b/thoth/common/logging.py
index 7301c8e5..fb5a9a3a 100644
--- a/thoth/common/logging.py
+++ b/thoth/common/logging.py
@@ -38,8 +38,7 @@ def _init_log_levels(logging_configuration: dict) -> None:
}
for logger, level in env_logging_conf.items():
- logger = 'thoth.' + \
- logger[len(_LOGGING_CONF_START):].lower().replace('__', '.')
+ logger = 'thoth.' + logger[len(_LOGGING_CONF_START):].lower().replace('__', '.')
level = getattr(logging, level)
logging.getLogger(logger).setLevel(level)
diff --git a/thoth/common/openshift.py b/thoth/common/openshift.py
new file mode 100644
index 00000000..557a2407
--- /dev/null
+++ b/thoth/common/openshift.py
@@ -0,0 +1,353 @@
+#!/usr/bin/env python3
+# thoth-common
+# Copyright(C) 2018 Fridolin Pokorny
+#
+# This program is free software: you can redistribute it and / or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Handling OpenShift and Kubernetes objects across project."""
+
+import logging
+import requests
+
+from .exceptions import NotFoundException
+from .exceptions import ConfigurationError
+from .helpers import get_service_account_token
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class OpenShift(object):
+ """Interaction with OpenShift Master."""
+
+ def __init__(self, *,
+ frontend_namespace: str = None, middletier_namespace: str = None, backend_namespace: str = None,
+ infra_namespace: str = None, kubernetes_api_url: str = None, kubernetes_verify_tls: bool = True,
+ openshift_api_url: str = None, token: str = None):
+ """Initialize OpenShift class responsible for handling objects in deployment."""
+ try:
+ from kubernetes import client, config
+ from openshift.dynamic import DynamicClient
+ except ImportError as exc:
+ raise ImportError(
+ "Unable to import OpenShift and Kubernetes packages. Was thoth-common library "
+ "installed with openshift extras?"
+ ) from exc
+
+ # Load in-cluster configuration that is exposed by OpenShift/k8s configuration.
+ config.load_incluster_config()
+
+ self.ocp_client = DynamicClient(client.ApiClient(configuration=client.Configuration()))
+ self.frontend_namespace = frontend_namespace
+ self.middletier_namespace = middletier_namespace
+ self.backend_namespace = backend_namespace
+ self.infra_namespace = infra_namespace
+ self.kubernetes_api_url = kubernetes_api_url or 'https://kubernetes.default.svc.cluster.local'
+ self.kubernetes_verify_tls = kubernetes_verify_tls
+ self.openshift_api_url = openshift_api_url or 'https://openshift.default.svc.cluster.local'
+ self._token = token
+
+ @property
+ def token(self):
+ """Access service account token mounted to the pod."""
+ if self._token is None:
+ self._token = get_service_account_token()
+
+ return self._token
+
+ @staticmethod
+ def _set_env_var(template: dict, **env_var):
+ """Set environment in the given template."""
+ for env_var_name, env_var_value in env_var.items():
+ for entry in template['spec']['containers'][0]['env']:
+ if entry['name'] == env_var_name:
+ entry['value'] = env_var_value
+ break
+ else:
+ template['spec']['containers'][0]['env'].append(
+ {'name': env_var_name, 'value': str(env_var_value)}
+ )
+
+ @staticmethod
+ def _set_template_parameters(template: dict, **parameters: object) -> None:
+ """Set parameters in the template - replace existing ones or append to parameter list if not exist.
+
+ >>> _set_template_parameters(template, THOTH_LOG_ADVISER='DEBUG')
+ """
+ if 'parameters' not in template:
+ template['parameters'] = []
+
+ for parameter_name, parameter_value in parameters.items():
+ for entry in template['parameters']:
+ if entry['name'] == parameter_name:
+ entry['value'] = str(parameter_value)
+ break
+ else:
+ template['parameters'].append({
+ 'name': parameter_name,
+ 'value': str(parameter_value)
+ })
+
+ def run_sync(self, force_analysis_results_sync: bool = False, force_solver_results_sync: bool = False) -> str:
+ """Run graph sync, base pod definition based on job definition."""
+ # Let's reuse pod definition from the cronjob definition so any changes in
+ # deployed application work out of the box.
+ if not self.frontend_namespace:
+ raise ConfigurationError("Graph sync requires frontend namespace configuration")
+
+ _LOGGER.debug("Retrieving graph-sync CronJob definition")
+ response = self.ocp_client.resources.get(api_version='v2alpha1', kind='CronJob').get(
+ namespace=self.frontend_namespace,
+ name='graph-sync'
+ )
+ template = response.to_dict()
+ labels = template['metadata']['labels']
+ labels.pop('template', None) # remove template label
+ job_template = template['spec']['jobTemplate']['spec']['template']
+ self._set_env_var(
+ job_template,
+ THOTH_GRAPH_SYNC_FORCE_ANALYSIS_RESULTS_SYNC=int(force_analysis_results_sync),
+ THOTH_GRAPH_SYNC_FORCE_SOLVER_RESULTS_SYNC=int(force_solver_results_sync)
+ )
+
+ # Construct a Pod spec.
+ pod_template = {
+ "apiVersion": "v1",
+ "kind": "Pod",
+ "metadata": {
+ "generateName": 'graph-sync-',
+ "labels": labels
+ },
+ "spec": job_template['spec']
+ }
+
+ response = self.ocp_client.resources.get(api_version='v1', kind='Pod').create(
+ body=pod_template,
+ namespace=self.frontend_namespace
+ )
+
+ _LOGGER.debug(f"Started graph-sync pod with name {response.metadata.name}")
+ return response.metadata.name
+
+ def get_pod_log(self, pod_id: str, namespace: str) -> str:
+ """Get log of a pod based on assigned pod ID."""
+ # TODO: rewrite to OpenShift rest client once it will support it.
+ endpoint = "{}/api/v1/namespaces/{}/pods/{}/log".format(
+ self.kubernetes_api_url,
+ namespace,
+ pod_id
+ )
+
+ response = requests.get(
+ endpoint,
+ headers={
+ 'Authorization': 'Bearer {}'.format(self.token),
+ 'Content-Type': 'application/json'
+ },
+ verify=self.kubernetes_verify_tls
+ )
+ _LOGGER.debug("Kubernetes master response for pod log (%d): %r", response.status_code, response.text)
+ response.raise_for_status()
+
+ return response.text
+
+ def get_pod_status(self, pod_id: str, namespace: str) -> dict:
+ """Get status entry for a pod - this applies only for solver and package-extract pods."""
+ import openshift
+
+ try:
+ response = self.ocp_client.resources.get(api_version='v1', kind='Pod').get(
+ namespace=namespace,
+ name=pod_id
+ )
+ except openshift.dynamic.exceptions.NotFoundError as exc:
+ raise NotFoundException(f"The given pod with id {pod_id} could not be found") from exc
+
+        _LOGGER.debug("OpenShift master response for pod status: %r", response.to_dict())
+ return response.to_dict()['status']['containerStatuses'][0]['state']
+
+ def get_solver_names(self) -> list:
+ """Retrieve name of solvers available in installation."""
+ if not self.infra_namespace:
+ raise ConfigurationError("Infra namespace is required in order to list solvers")
+
+ response = self.ocp_client.resources.get(api_version='v1', kind='Template').get(
+ namespace=self.infra_namespace,
+ label_selector='template=solver'
+ )
+ _LOGGER.debug("OpenShift response for getting solver template: %r", response.to_dict())
+ self._raise_on_invalid_response_size(response)
+ return [obj['metadata']['labels']['component'] for obj in response.to_dict()['items'][0]['objects']]
+
+ def run_solver(self, packages: str, output: str, debug: bool = False,
+ transitive: bool = True, solver: str = None) -> dict:
+ """Run solver or all solver to solve the given requirements."""
+ if not self.middletier_namespace:
+            raise ConfigurationError("Solver requires middletier namespace to be specified")
+
+ if not self.infra_namespace:
+ raise ConfigurationError("Infra namespace is required to gather solver template when running solver")
+
+ response = self.ocp_client.resources.get(api_version='v1', kind='Template').get(
+ namespace=self.infra_namespace,
+ label_selector='template=solver'
+ )
+ _LOGGER.debug("OpenShift response for getting solver template: %r", response.to_dict())
+
+ self._raise_on_invalid_response_size(response)
+ template = response.to_dict()['items'][0]
+
+ self._set_template_parameters(
+ template,
+ THOTH_SOLVER_NO_TRANSITIVE=int(not transitive),
+ THOTH_SOLVER_PACKAGES=packages.replace('\n', '\\n'),
+ THOTH_LOG_SOLVER='DEBUG' if debug else 'INFO',
+ THOTH_SOLVER_OUTPUT=output
+ )
+
+ template = self._oc_process(self.middletier_namespace, template)
+
+ solvers = {}
+ for obj in template['objects']:
+ solver_name = obj['metadata']['labels']['component']
+ if solver and solver != solver_name:
+                _LOGGER.debug("Skipping solver %r as the requested solver is %r", solver_name, solver)
+ continue
+
+ response = self.ocp_client.resources.get(api_version='v1', kind=obj['kind']).create(
+ body=obj,
+ namespace=self.middletier_namespace
+ )
+
+ _LOGGER.debug("Starting solver %r", solver_name)
+ _LOGGER.debug("OpenShift response for creating a pod: %r", response.to_dict())
+ solvers[solver_name] = response.metadata.name
+
+ return solvers
+
+ def run_package_extract(self, image: str, output: str, debug: bool = False,
+ registry_user: str = None, registry_password: str = None, verify_tls: bool = True) -> str:
+ """Run package-extract analyzer to extract information from the provided image."""
+ if not self.middletier_namespace:
+ raise ConfigurationError("Running package-extract requires middletier namespace to be specified")
+
+ if not self.infra_namespace:
+ raise ConfigurationError("Infra namespace is required to gather package-extract template when running it")
+
+ response = self.ocp_client.resources.get(api_version='v1', kind='Template').get(
+ namespace=self.infra_namespace,
+ label_selector='template=package-extract'
+ )
+ _LOGGER.debug("OpenShift response for getting package-extract template: %r", response.to_dict())
+ self._raise_on_invalid_response_size(response)
+ template = response.to_dict()['items'][0]
+
+ self._set_template_parameters(
+ template,
+ THOTH_LOG_PACKAGE__EXTRACT='DEBUG' if debug else 'INFO',
+ THOTH_ANALYZED_IMAGE=image,
+ THOTH_ANALYZER_NO_TLS_VERIFY=int(not verify_tls),
+ THOTH_ANALYZER_OUTPUT=output
+ )
+
+ if registry_user and registry_password:
+ self._set_template_parameters(
+ template,
+ THOTH_REGISTRY_CREDENTIALS=f"{registry_user}:{registry_password}"
+ )
+
+ template = self._oc_process(self.middletier_namespace, template)
+ analyzer = template['objects'][0]
+
+ response = self.ocp_client.resources.get(api_version='v1', kind=analyzer['kind']).create(
+ body=analyzer,
+ namespace=self.middletier_namespace
+ )
+
+ _LOGGER.debug("OpenShift response for creating a pod: %r", response.to_dict())
+ return response.metadata.name
+
+ def run_adviser(self, application_stack: dict, output: str, type: str,
+ runtime_environment: str = None, debug: bool = False) -> str:
+ """Run adviser on the provided user input."""
+ if not self.backend_namespace:
+            raise ConfigurationError("Running adviser requires backend namespace configuration")
+
+ if not self.infra_namespace:
+ raise ConfigurationError("Infra namespace is required to gather adviser template when running it")
+
+ response = self.ocp_client.resources.get(api_version='v1', kind='Template').get(
+ namespace=self.infra_namespace,
+ label_selector='template=adviser'
+ )
+ _LOGGER.debug("OpenShift response for getting adviser template: %r", response.to_dict())
+ self._raise_on_invalid_response_size(response)
+
+ template = response.to_dict()['items'][0]
+ self._set_template_parameters(
+ template,
+ THOTH_ADVISER_REQUIREMENTS=application_stack.pop('requirements').replace('\n', '\\n'),
+ THOTH_ADVISER_REQUIREMENTS_LOCKED=application_stack.get('requirements_lock', '').replace('\n', '\\n'),
+            THOTH_ADVISER_REQUIREMENTS_FORMAT=application_stack.get('requirements_format', 'pipenv'),
+ THOTH_ADVISER_RECOMMENDATION_TYPE=type,
+ THOTH_ADVISER_RUNTIME_ENVIRONMENT=runtime_environment,
+ THOTH_ADVISER_OUTPUT=output,
+ THOTH_LOG_ADVISER='DEBUG' if debug else 'INFO'
+ )
+
+ template = self._oc_process(self.backend_namespace, template)
+ adviser = template['objects'][0]
+
+ response = self.ocp_client.resources.get(api_version='v1', kind=adviser['kind']).create(
+ body=adviser,
+ namespace=self.backend_namespace
+ )
+
+ _LOGGER.debug("OpenShift response for creating a pod: %r", response.to_dict())
+ return response.metadata.name
+
+ def _raise_on_invalid_response_size(self, response):
+ """It is expected that there is only one object type for the given item."""
+ if len(response.items) != 1:
+ raise RuntimeError(
+ f"Application misconfiguration - number of templates available in the infra namespace "
+ f"{self.infra_namespace!r} is {len(response.items)}, should be 1."
+ )
+
+ def _oc_process(self, namespace: str, template: dict) -> dict:
+ """Process the given template in OpenShift."""
+ # This does not work - see issue reported upstream:
+ # https://github.com/openshift/openshift-restclient-python/issues/190
+ # return TemplateOpenshiftIoApi().create_namespaced_processed_template_v1(namespace, template)
+ endpoint = "{}/apis/template.openshift.io/v1/namespaces/{}/processedtemplates".format(
+ self.openshift_api_url,
+ namespace
+ )
+ response = requests.post(
+ endpoint,
+ json=template,
+ headers={
+ 'Authorization': 'Bearer {}'.format(self.token),
+ 'Content-Type': 'application/json'
+ },
+ verify=self.kubernetes_verify_tls
+ )
+ _LOGGER.debug("OpenShift master response template (%d): %r", response.status_code, response.text)
+
+ try:
+ response.raise_for_status()
+ except Exception:
+ _LOGGER.error("Failed to process template: %s", response.text)
+ raise
+
+ return response.json()