
Prevent users from installing conflicting packages

vishalbollu committed Mar 5, 2019
1 parent 6a53a8c commit edae83bb5e49788516120765f250ec3d461dffbb
@@ -37,20 +37,6 @@ def aggregate_spark(data, columns, args):
return discretizer.getSplits()
```

-## Pre-installed Packages
-
-The following packages have been pre-installed and can be used in your implementations:
-
-```text
-pyspark==2.4.0
-numpy==1.15.4
-pandas==0.23.4
-scipy==1.2.0
-sympy==1.3
-statsmodels==0.9.0
-python-dateutil==2.7.5
-six==1.11.0
-wrapt==1.11.0
-```
+## Customization

-You can install additional PyPI packages and import your own Python packages. See [Python Packages](../advanced/python-packages.md) for more details.
+You can import PyPI packages or your own Python packages to help create more complex aggregators. See [Python Packages](../advanced/python-packages.md) for more details.
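To make the customization note above concrete, here is a minimal sketch of an aggregator that stays within the packages Cortex pins (pyspark and numpy); the `aggregate_spark` signature comes from the snippet above, while the `columns["col"]` key and the quantile logic are made up for illustration:

```python
import numpy as np


def aggregate_spark(data, columns, args):
    # data is assumed to be a Spark DataFrame; compute decile boundaries for a
    # hypothetical numeric column using only pre-installed pyspark and numpy.
    probabilities = [float(p) for p in np.linspace(0.1, 0.9, 9)]
    return data.approxQuantile(columns["col"], probabilities, 0.001)
```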
@@ -46,23 +46,6 @@ def create_estimator(run_config, model_config):
)
```

-## Pre-installed Packages
+## Customization

-The following packages have been pre-installed and can be used in your implementations:
-
-```text
-tensorflow==1.12.0
-numpy==1.15.4
-pandas==0.23.4
-scipy==1.2.0
-sympy==1.3
-statsmodels==0.9.0
-python-dateutil==2.7.5
-six==1.11.0
-wrapt==1.11.0
-requests==2.21.0
-oauthlib==3.0.0
-httplib2==0.12.0
-```
-
-You can install additional PyPI packages and import your own Python packages. See [Python Packages](../advanced/python-packages.md) for more details.
+You can import PyPI packages or your own Python packages to help create more complex models. See [Python Packages](../advanced/python-packages.md) for more details.
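Likewise for models, a sketch of a `create_estimator` that relies only on the pinned tensorflow==1.12.0; the signature comes from the snippet above, and the `model_config` keys are hypothetical:

```python
import tensorflow as tf


def create_estimator(run_config, model_config):
    # run_config is assumed to be a tf.estimator.RunConfig; the config keys are illustrative.
    feature_columns = [
        tf.feature_column.numeric_column(col) for col in model_config["feature_columns"]
    ]
    return tf.estimator.DNNClassifier(
        feature_columns=feature_columns,
        hidden_units=model_config.get("hidden_units", [64, 32]),
        n_classes=model_config.get("n_classes", 2),
        config=run_config,
    )
```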
@@ -81,20 +81,6 @@ def reverse_transform_python(transformed_value, args):
return args["mean"] + (transformed_value * args["stddev"])
```

-## Pre-installed Packages
-
-The following packages have been pre-installed and can be used in your implementations:
-
-```text
-pyspark==2.4.0
-numpy==1.15.4
-pandas==0.23.4
-scipy==1.2.0
-sympy==1.3
-statsmodels==0.9.0
-python-dateutil==2.7.5
-six==1.11.0
-wrapt==1.11.0
-```
+## Customization

-You can install additional PyPI packages and import your own Python packages. See [Python Packages](../advanced/python-packages.md) for more details.
+You can import PyPI packages or your own Python packages to help create more complex transformers. See [Python Packages](../advanced/python-packages.md) for more details.
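And for transformers, the forward counterpart of the `reverse_transform_python` shown above would be a standardization step along these lines (the `transform_python` name and signature are assumed here to mirror the reverse function; see the transformers docs for the exact interface):

```python
def transform_python(sample, args):
    # Inverse of reverse_transform_python above: z-score the raw value using the
    # aggregated mean and stddev supplied through args.
    return (sample - args["mean"]) / args["stddev"]
```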
@@ -26,13 +26,9 @@ RUN apt-get update -qq && apt-get install -y -q \
COPY pkg/workloads/consts.py /src/
COPY pkg/workloads/lib /src/lib

-COPY pkg/workloads/spark_job/requirements.txt /src/spark_job/requirements.txt
COPY pkg/workloads/tf_api/requirements.txt /src/tf_api/requirements.txt
-COPY pkg/workloads/tf_train/requirements.txt /src/tf_train/requirements.txt

RUN pip3 install -r /src/lib/requirements.txt && \
-pip3 install -r /src/tf_train/requirements.txt && \
-pip3 install -r /src/spark_job/requirements.txt && \
pip3 install -r /src/tf_api/requirements.txt && \
rm -rf /root/.cache/pip*

@@ -36,9 +36,7 @@ RUN sed -i "/^set -ex$/c\set -e" /opt/entrypoint.sh
ENV PYTHONPATH="/src:${PYTHONPATH}"

COPY pkg/workloads/lib/requirements.txt /src/lib/requirements.txt
-COPY pkg/workloads/spark_job/requirements.txt /src/spark_job/requirements.txt
RUN pip3 install -r /src/lib/requirements.txt && \
-pip3 install -r /src/spark_job/requirements.txt && \
rm -rf /root/.cache/pip*

COPY pkg/workloads/consts.py /src/
@@ -3,9 +3,7 @@ FROM cortexlabs/tf-base-gpu
ENV PYTHONPATH="/src:${PYTHONPATH}"

COPY pkg/workloads/lib/requirements.txt /src/lib/requirements.txt
-COPY pkg/workloads/tf_train/requirements.txt /src/tf_train/requirements.txt
RUN pip3 install -r /src/lib/requirements.txt && \
-pip3 install -r /src/tf_train/requirements.txt && \
rm -rf /root/.cache/pip*

COPY pkg/workloads/consts.py /src/
@@ -3,9 +3,7 @@ FROM cortexlabs/tf-base
ENV PYTHONPATH="/src:${PYTHONPATH}"

COPY pkg/workloads/lib/requirements.txt /src/lib/requirements.txt
-COPY pkg/workloads/tf_train/requirements.txt /src/tf_train/requirements.txt
RUN pip3 install -r /src/lib/requirements.txt && \
-pip3 install -r /src/tf_train/requirements.txt && \
rm -rf /root/.cache/pip*

COPY pkg/workloads/consts.py /src/
@@ -92,7 +92,7 @@ func loadPythonPackages(files map[string][]byte) (context.PythonPackages, error)
id := hash.Bytes(buf.Bytes())
pythonPackage := context.PythonPackage{
ResourceConfigFields: userconfig.ResourceConfigFields{
-Name: consts.RequirementsTxt,
+Name: packageName,
},
ComputedResourceFields: &context.ComputedResourceFields{
ResourceFields: &context.ResourceFields{
@@ -124,7 +124,6 @@ func (client *Client) sendErrorEvent(operatorID string, err error) {

func ReportEvent(name string) {
if cc.EnableTelemetry {
-fmt.Println(cc.EnableTelemetry)
go getDefaultClient().sendUsageEvent(aws.HashedAccountID, name)
}
}
@@ -15,16 +15,19 @@
import os
import sys
import argparse
+import glob
from subprocess import run

from lib import util, aws
from lib.context import Context
from lib.log import get_logger
from lib.exceptions import UserException, CortexException

+import requirements

logger = get_logger()

LOCAL_PACKAGE_PATH = "/src/package"
LOCAL_PACKAGE_PATH = "/packages"
WHEELHOUSE_PATH = "/wheelhouse"


@@ -35,6 +38,16 @@ def get_build_order(python_packages):
    return build_order + sorted([name for name in python_packages if name != "requirements.txt"])


+def get_restricted_packages():
+    cortex_packages = {"pyspark": "2.4.0", "tensorflow": "1.12.0"}
+    req_files = glob.glob("/src/**/requirements.txt", recursive=True)
+    for req_file in req_files:
+        with open(req_file) as f:
+            for req in requirements.parse(f):
+                cortex_packages[req.name] = req.specs[0][1]
+    return cortex_packages
+
+
def build_packages(python_packages, bucket):
    cmd_partial = {}
    build_order = get_build_order(python_packages)
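The new `get_restricted_packages` relies on requirements-parser (pinned further down as requirements-parser==0.2.0): each parsed requirement exposes a `name` and a list of `(operator, version)` specs, which is why `req.specs[0][1]` yields the pinned version. A quick illustration with a made-up requirements snippet:

```python
import io

import requirements  # requirements-parser

# Parsing pinned requirements yields objects with .name and .specs, where
# specs is a list of (operator, version) tuples.
pinned = io.StringIO("numpy==1.15.4\npandas==0.23.4\n")
print({req.name: req.specs[0][1] for req in requirements.parse(pinned)})
# -> {'numpy': '1.15.4', 'pandas': '0.23.4'}
```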
@@ -50,22 +63,35 @@ def build_packages(python_packages, bucket):

logger.info("Setting up packages")

restricted_packages = get_restricted_packages()

for package_name in build_order:
package_wheel_path = os.path.join(WHEELHOUSE_PATH, package_name)
requirement = cmd_partial[package_name]
logger.info("Building package {}".format(package_name))
logger.info("Building: {}".format(package_name))
completed_process = run(
"pip3 wheel -w {} {}".format(
os.path.join(WHEELHOUSE_PATH, package_name), requirement
).split()
"pip3 wheel -w {} {}".format(package_wheel_path, requirement).split()
)

if completed_process.returncode != 0:
raise UserException("creating wheels", package_name)

for wheelname in os.listdir(package_wheel_path):
name_split = wheelname.split("-")
dist_name, version = name_split[0], name_split[1]
expected_version = restricted_packages.get(dist_name, None)
if expected_version is not None and version != expected_version:
raise UserException(
"when installing {}, found {}=={} but cortex requires {}=={}".format(
package_name, dist_name, version, dist_name, expected_version
)
)

logger.info("Validating packages")

for package_name in build_order:
requirement = cmd_partial[package_name]
logger.info("Installing package {}".format(package_name))
logger.info("Installing: {}".format(package_name))
completed_process = run(
"pip3 install --no-index --find-links={} {}".format(
os.path.join(WHEELHOUSE_PATH, package_name), requirement
@@ -94,6 +120,10 @@ def build(args):
    try:
        build_packages(python_packages, ctx.bucket)
        util.log_job_finished(ctx.workload_id)
+    except CortexException as e:
+        e.wrap("error")
+        logger.exception(e)
+        ctx.upload_resource_status_failed(*python_packages_list)
    except Exception as e:
        logger.exception(e)
        ctx.upload_resource_status_failed(*python_packages_list)
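The wheel check above works because built wheel filenames follow the `{distribution}-{version}-{python tag}-{abi tag}-{platform tag}.whl` convention, so splitting on `-` and taking the first two fields recovers the distribution name and version. For example (the wheel filename is illustrative; the pins mirror the hardcoded pyspark/tensorflow entries and the numpy version from the requirements files in this commit):

```python
# A user package that drags in a newer numpy than the pinned 1.15.4 would now
# fail the build with a UserException.
wheelname = "numpy-1.16.0-cp36-cp36m-manylinux1_x86_64.whl"
dist_name, version = wheelname.split("-")[:2]
restricted_packages = {"numpy": "1.15.4", "pyspark": "2.4.0", "tensorflow": "1.12.0"}
expected_version = restricted_packages.get(dist_name, None)
assert (dist_name, version) == ("numpy", "1.16.0")
assert expected_version is not None and version != expected_version  # conflict detected
```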
@@ -1,2 +1,6 @@
# specify exact version for each dependency, name==version

+boto3==1.9.78
+msgpack==0.6.1
+numpy==1.15.4
+requirements-parser==0.2.0

This file was deleted.

@@ -1,16 +1,6 @@
+# specify exact version for each dependency, name==version
+
flask==1.0.2
-Flask-API==1.1
+flask-api==1.1
waitress==1.2.1
tensorflow-serving-api==1.12.0

-numpy==1.15.4
-pandas==0.23.4
-scipy==1.2.0
-sympy==1.3
-statsmodels==0.9.0
-python-dateutil==2.7.5
-six==1.11.0
-wrapt==1.11.0
-requests==2.21.0
-oauthlib==3.0.0
-httplib2==0.12.0

This file was deleted.

