diff --git a/docker/1.2-1-1/base/Dockerfile.cpu b/docker/1.2-1-1/base/Dockerfile.cpu deleted file mode 100644 index 16278337..00000000 --- a/docker/1.2-1-1/base/Dockerfile.cpu +++ /dev/null @@ -1,198 +0,0 @@ -ARG UBUNTU_VERSION=20.04 -ARG UBUNTU_IMAGE_DIGEST=874aca52f79ae5f8258faff03e10ce99ae836f6e7d2df6ecd3da5c1cad3a912b - -# Build stage for SQLite compilation -FROM ubuntu:${UBUNTU_VERSION}@sha256:${UBUNTU_IMAGE_DIGEST} as sqlite-builder -RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - wget \ - ca-certificates \ - && \ - cd /tmp && \ - wget https://www.sqlite.org/2025/sqlite-autoconf-3500200.tar.gz && \ - tar xzf sqlite-autoconf-3500200.tar.gz && \ - cd sqlite-autoconf-3500200 && \ - ./configure --prefix=/usr/local && \ - make && \ - make install && \ - ldconfig && \ - cd / && \ - rm -rf /tmp/sqlite-autoconf-3500200 /tmp/sqlite-autoconf-3500200.tar.gz && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -# Main image -FROM ubuntu:${UBUNTU_VERSION}@sha256:${UBUNTU_IMAGE_DIGEST} - -ARG MINICONDA_VERSION=24.7.1 -ARG CONDA_CHECKSUM=684cda724bc37e3bbbb342e440fc4cac515c92e91a489eb4359feca35382894b -ARG CONDA_PY_VERSION=310 -ARG CONDA_PKG_VERSION=24.7.1 -ARG PYTHON_VERSION=3.10 -ARG PYARROW_VERSION=14.0.1 -ARG MLIO_VERSION=v0.9.0 - -ENV DEBIAN_FRONTEND=noninteractive - -# Install python and other scikit-learn runtime dependencies -# Dependency list from http://scikit-learn.org/stable/developers/advanced_installation.html#installing-build-dependencies -RUN apt-get update && \ - apt-get -y upgrade && \ - apt-get -y install --no-install-recommends \ - build-essential \ - curl \ - git \ - jq \ - libatlas-base-dev \ - nginx \ - openjdk-8-jdk-headless \ - unzip \ - wget \ - expat \ - tzdata \ - apparmor\ - libgstreamer1.0-0 \ - linux-libc-dev \ - libxml2 \ - libsqlite3-0 \ - && \ - # MLIO build dependencies - # Official Ubuntu APT repositories do not contain an up-to-date version of CMake required to build MLIO. 
- # Kitware contains the latest version of CMake. - wget http://es.archive.ubuntu.com/ubuntu/pool/main/libf/libffi/libffi7_3.3-4_amd64.deb && \ - dpkg -i libffi7_3.3-4_amd64.deb && \ - apt-get -y install --no-install-recommends \ - apt-transport-https \ - ca-certificates \ - gnupg \ - software-properties-common \ - && \ - wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | \ - gpg --dearmor - | \ - tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null && \ - echo 'deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ bionic main' | tee /etc/apt/sources.list.d/kitware.list >/dev/null && \ - apt-get update && \ - rm /usr/share/keyrings/kitware-archive-keyring.gpg && \ - apt-get install -y --no-install-recommends \ - autoconf \ - automake \ - build-essential \ - cmake \ - cmake-data \ - doxygen \ - kitware-archive-keyring \ - libcurl4-openssl-dev \ - libssl-dev \ - libtool \ - ninja-build \ - python3-dev \ - python3-distutils \ - python3-pip \ - zlib1g-dev \ - && \ - python3 -m pip install --upgrade pip && \ - python3 -m pip install --upgrade certifi && \ - apt-get clean && \ - # Node.js setup - mkdir -p /etc/apt/keyrings && \ - curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | \ - gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \ - echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | \ - tee /etc/apt/sources.list.d/nodesource.list && \ - apt-get update && \ - apt-get install -y nodejs && \ - npm install -g npm@latest && \ - rm -rf /var/lib/apt/lists/* - -RUN ln -fs /usr/share/zoneinfo/UTC /etc/localtime && \ - dpkg-reconfigure --frontend noninteractive tzdata - -RUN cd /tmp && \ - curl -L --output /tmp/Miniconda3.sh https://repo.anaconda.com/miniconda/Miniconda3-py${CONDA_PY_VERSION}_${MINICONDA_VERSION}-0-Linux-x86_64.sh && \ - echo "${CONDA_CHECKSUM} /tmp/Miniconda3.sh" | sha256sum -c - && \ - bash 
/tmp/Miniconda3.sh -bfp /miniconda3 && \ - rm /tmp/Miniconda3.sh && \ - # Remove this when we move to Miniconda version with conda package version 4.13.0+ - rm -rf /miniconda3/pkgs/conda-4.12.0-py38h06a4308_0/info/test/* - -ENV PATH=/miniconda3/bin:${PATH} -ENV PIP_ROOT_USER_ACTION=ignore - -# Install MLIO with Apache Arrow integration -# We could install mlio-py from conda, but it comes with extra support such as image reader that increases image size -# which increases training time. We build from source to minimize the image size. -RUN echo "conda ${CONDA_PKG_VERSION}" >> /miniconda3/conda-meta/pinned && \ - # Conda configuration see https://conda.io/projects/conda/en/latest/configuration.html - conda config --system --set auto_update_conda false && \ - conda config --system --set show_channel_urls true && \ - echo "python ${PYTHON_VERSION}.*" >> /miniconda3/conda-meta/pinned && \ - conda install -c conda-forge python=${PYTHON_VERSION} --solver classic && \ - conda install conda=${CONDA_PKG_VERSION} --solver classic && \ - conda update -y conda && \ - conda install -c conda-forge pyarrow=${PYARROW_VERSION} --solver classic && \ - cd /miniconda3/pkgs/libgrpc-*/info/test/examples/node && \ - npm install minimist@latest protobufjs@latest && \ - # Remove Node.js, npm, and their dependencies - apt-get purge -y nodejs npm && \ - apt-get autoremove -y && \ - # Final cleanup - rm -rf /etc/apt/sources.list.d/nodesource.list \ - /etc/apt/keyrings/nodesource.gpg \ - /etc/apt/sources.list.d/kitware.list && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* && \ - # Continue with the rest of the build process - conda install pip --force-reinstall && \ - python3 -m pip install --upgrade pip && \ - python3 -m pip install wheel && \ - cd /tmp && \ - git clone --branch ${MLIO_VERSION} https://github.com/awslabs/ml-io.git mlio && \ - cd mlio && \ - build-tools/build-dependency build/third-party all && \ - mkdir -p build/release && \ - cd build/release && \ - cmake -GNinja 
-DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_PREFIX_PATH="$(pwd)/../third-party" ../.. && \ - cmake --build . && \ - cmake --build . --target install && \ - cmake -DMLIO_INCLUDE_PYTHON_EXTENSION=ON -DPYTHON_EXECUTABLE="/miniconda3/bin/python3" \ - -DMLIO_INCLUDE_ARROW_INTEGRATION=ON ../.. && \ - cmake --build . --target mlio-py && \ - cmake --build . --target mlio-arrow && \ - cd ../../src/mlio-py && \ - python3 setup.py bdist_wheel && \ - python3 -m pip install dist/*.whl && \ - cp -r /tmp/mlio/build/third-party/lib/libtbb* /usr/local/lib/ && \ - ldconfig && \ - rm -rf /tmp/mlio - -# Copy compiled SQLite from builder stage -COPY --from=sqlite-builder /usr/local/bin/sqlite3 /usr/local/bin/sqlite3 -COPY --from=sqlite-builder /usr/local/lib/libsqlite3.* /usr/local/lib/ -COPY --from=sqlite-builder /usr/local/include/sqlite3*.h /usr/local/include/ - -# Update library cache and ensure /usr/local/bin is in PATH -RUN ldconfig && \ - echo "/usr/local/lib" > /etc/ld.so.conf.d/sqlite3.conf && \ - ldconfig - -ENV PATH="/usr/local/bin:${PATH}" - -# This command will check the version and print it to the build logs -RUN sqlite3 --version - -RUN apt list --installed - -# Install awscli -RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ - unzip awscliv2.zip && \ - ./aws/install && \ - rm -r aws awscliv2.zip - -# Python won’t try to write .pyc or .pyo files on the import of source modules -# Force stdin, stdout and stderr to be totally unbuffered. Good for logging -ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 PYTHONIOENCODING=UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 - -# Install Scikit-Learn -# Scikit-learn 0.20 was the last version to support Python 2.7 and Python 3.4. -# Scikit-learn now requires Python 3.6 or newer. 
-RUN python3 -m pip install --no-cache -I scikit-learn==1.2.1 \ No newline at end of file diff --git a/docker/1.4-2/base/Dockerfile.cpu b/docker/1.4-2/base/Dockerfile.cpu new file mode 100644 index 00000000..9ac17115 --- /dev/null +++ b/docker/1.4-2/base/Dockerfile.cpu @@ -0,0 +1,190 @@ +ARG UBUNTU_VERSION=24.04 +ARG UBUNTU_IMAGE_DIGEST=b359f1067efa76f37863778f7b6d0e8d911e3ee8efa807ad01fbf5dc1ef9006b + +# Build stage for SQLite compilation +FROM ubuntu:${UBUNTU_VERSION}@sha256:${UBUNTU_IMAGE_DIGEST} as sqlite-builder +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential wget ca-certificates && \ + cd /tmp && \ + wget https://www.sqlite.org/2025/sqlite-autoconf-3500200.tar.gz && \ + tar xzf sqlite-autoconf-3500200.tar.gz && \ + cd sqlite-autoconf-3500200 && \ + ./configure --prefix=/usr/local && \ + make && \ + make install && \ + ldconfig && \ + cd / && \ + rm -rf /tmp/sqlite-autoconf-3500200 /tmp/sqlite-autoconf-3500200.tar.gz && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# MLIO builder stage with Ubuntu 20.04 +FROM ubuntu:20.04@sha256:874aca52f79ae5f8258faff03e10ce99ae836f6e7d2df6ecd3da5c1cad3a912b as mlio-builder + +ARG PYTHON_VERSION=3.10 +ARG MLIO_VERSION=v0.9.0 +ARG PYARROW_VERSION=17.0.0 + +ENV DEBIAN_FRONTEND=noninteractive + +# Install python and other scikit-learn runtime dependencies +RUN apt-get update && \ + apt-get -y install --no-install-recommends \ + build-essential curl git wget ca-certificates lsb-release software-properties-common && \ + # Add Apache Arrow repository + wget https://packages.apache.org/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb && \ + apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb && \ + apt-get update && \ + apt-get install -y -V libarrow-dev=17.0.0-1 libarrow-dataset-dev=17.0.0-1 libparquet-dev=17.0.0-1 libarrow-acero-dev=17.0.0-1 && \ + # Add deadsnakes 
PPA for Python 3.10 + add-apt-repository ppa:deadsnakes/ppa && \ + apt-get update && \ + apt-get -y install --no-install-recommends \ + python3.10 python3.10-dev python3.10-distutils && \ + # MLIO build dependencies + wget http://es.archive.ubuntu.com/ubuntu/pool/main/libf/libffi/libffi7_3.3-4_amd64.deb && \ + dpkg -i libffi7_3.3-4_amd64.deb && \ + apt-get -y install --no-install-recommends \ + apt-transport-https gnupg && \ + wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | \ + gpg --dearmor - | \ + tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null && \ + echo 'deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ focal main' | tee /etc/apt/sources.list.d/kitware.list >/dev/null && \ + apt-get update && \ + rm /usr/share/keyrings/kitware-archive-keyring.gpg && \ + apt-get install -y --no-install-recommends \ + autoconf automake cmake cmake-data doxygen kitware-archive-keyring libcurl4-openssl-dev libssl-dev libtool ninja-build zlib1g-dev && \ + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 1 && \ + curl -sS https://bootstrap.pypa.io/get-pip.py | python3 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +RUN ln -fs /usr/share/zoneinfo/UTC /etc/localtime && \ + dpkg-reconfigure --frontend noninteractive tzdata + +ENV PIP_ROOT_USER_ACTION=ignore + +# Build MLIO from scratch +# Clone MLIO repository +RUN cd /tmp && \ + git clone --branch ${MLIO_VERSION} https://github.com/awslabs/ml-io.git mlio + +# Patch MLIO for Arrow 17.0.0 +RUN cd /tmp/mlio && \ + sed -i 's/find_package(Arrow 14.0.1 REQUIRED/find_package(Arrow 17.0.0 REQUIRED/g' CMakeLists.txt && \ + sed -i 's/pyarrow==14.0.1/pyarrow==17.0.0/g' src/mlio-py/setup.py + +# Build MLIO third-party dependencies (includes Arrow C++) +RUN cd /tmp/mlio && \ + build-tools/build-dependency build/third-party all + +# Configure MLIO build +RUN cd /tmp/mlio && \ + mkdir -p build/release && \ + cd 
build/release && \ + cmake -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_PREFIX_PATH="$(pwd)/../third-party" ../.. + +# Build MLIO core +RUN cd /tmp/mlio/build/release && \ + cmake --build . && \ + cmake --build . --target install + +# Configure MLIO Python extension +RUN cd /tmp/mlio/build/release && \ + cmake -DMLIO_INCLUDE_PYTHON_EXTENSION=ON -DPYTHON_EXECUTABLE="/usr/bin/python3" \ + -DMLIO_INCLUDE_ARROW_INTEGRATION=ON -DCMAKE_PREFIX_PATH="$(pwd)/../third-party" ../.. + +# Build MLIO Python extension +RUN cd /tmp/mlio/build/release && \ + cmake --build . --target mlio-py && \ + cmake --build . --target mlio-arrow + +# Build MLIO Python wheel +RUN cd /tmp/mlio/src/mlio-py && \ + python3 setup.py bdist_wheel + +# Copy TBB libraries and MLIO shared libraries to a location we can copy from +RUN mkdir -p /mlio-artifacts && \ + cp -r /tmp/mlio/build/third-party/lib/libtbb* /mlio-artifacts/ && \ + cp /usr/local/lib/libmlio* /mlio-artifacts/ 2>/dev/null || true && \ + cp /tmp/mlio/src/mlio-py/dist/*.whl /mlio-artifacts/ + +# Main image +FROM ubuntu:${UBUNTU_VERSION}@sha256:${UBUNTU_IMAGE_DIGEST} + +ARG PYTHON_VERSION=3.10 +ARG PYARROW_VERSION=17.0.0 + +ENV DEBIAN_FRONTEND=noninteractive + +# Install runtime dependencies only +RUN apt-get update && \ + apt-get -y upgrade && \ + apt-get -y install --no-install-recommends \ + curl git jq libatlas-base-dev nginx openjdk-8-jdk-headless unzip wget expat tzdata apparmor \ + libgstreamer1.0-0 libxml2 libsqlite3-0 software-properties-common ca-certificates lsb-release \ + build-essential linux-libc-dev && \ + # Add Apache Arrow repository for runtime libraries only + wget https://packages.apache.org/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb && \ + apt install -y -V ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb && \ + apt-get update && \ + apt-get install -y -V libarrow-dev=17.0.0-1 libarrow-dataset-dev=17.0.0-1 
libparquet-dev=17.0.0-1 libarrow-acero-dev=17.0.0-1 && \ + # Add deadsnakes PPA for Python 3.10 + add-apt-repository ppa:deadsnakes/ppa && \ + apt-get update && \ + apt-get -y install --no-install-recommends \ + python3.10 python3.10-distutils python3.10-dev && \ + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 1 && \ + curl -sS https://bootstrap.pypa.io/get-pip.py | python3 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +RUN ln -fs /usr/share/zoneinfo/UTC /etc/localtime && \ + dpkg-reconfigure --frontend noninteractive tzdata + +# Install uv for fast Python package management +RUN curl -LsSf https://astral.sh/uv/install.sh | sh && \ + mv /root/.local/bin/uv /usr/local/bin/uv + +ENV PATH=/usr/local/bin:${PATH} +ENV PIP_ROOT_USER_ACTION=ignore + +# Copy MLIO wheel, TBB libraries, and MLIO shared libraries from builder stage +COPY --from=mlio-builder /mlio-artifacts/*.whl /tmp/ +COPY --from=mlio-builder /mlio-artifacts/libtbb* /usr/local/lib/ +COPY --from=mlio-builder /mlio-artifacts/libmlio* /usr/local/lib/ + +# Install MLIO wheel +RUN uv pip install --system /tmp/*.whl && \ + rm /tmp/*.whl + +# Copy compiled SQLite from builder stage +COPY --from=sqlite-builder /usr/local/bin/sqlite3 /usr/local/bin/sqlite3 +COPY --from=sqlite-builder /usr/local/lib/libsqlite3.* /usr/local/lib/ +COPY --from=sqlite-builder /usr/local/include/sqlite3*.h /usr/local/include/ + +# Update library cache and ensure /usr/local/bin is in PATH +RUN ldconfig && \ + echo "/usr/local/lib" > /etc/ld.so.conf.d/sqlite3.conf && \ + ldconfig + +ENV PATH="/usr/local/bin:${PATH}" + +# This command will check the version and print it to the build logs +RUN sqlite3 --version + +# Install awscli +RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ + unzip awscliv2.zip && \ + ./aws/install && \ + rm -r aws awscliv2.zip + +# Python won't try to write .pyc or .pyo files on the import of source modules +# Force stdin, stdout and 
stderr to be totally unbuffered. Good for logging +ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1 PYTHONIOENCODING=UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 + +# Install core scientific packages with exact versions +RUN uv pip install --system --no-cache \ + numpy==2.1.0 \ + scikit-learn==1.4.2 \ + pyarrow==17.0.0 diff --git a/docker/1.2-1-1/extension/Dockerfile.cpu b/docker/1.4-2/extension/Dockerfile.cpu similarity index 63% rename from docker/1.2-1-1/extension/Dockerfile.cpu rename to docker/1.4-2/extension/Dockerfile.cpu index 1317c8ec..8550725e 100644 --- a/docker/1.2-1-1/extension/Dockerfile.cpu +++ b/docker/1.4-2/extension/Dockerfile.cpu @@ -1,9 +1,9 @@ -FROM preprod-sklearn:1.2-1 +FROM preprod-sklearn:1.4-2 -RUN pip freeze | grep -q 'scikit-learn==1.2.1'; \ +RUN pip freeze | grep -q 'scikit-learn==1.4.2'; \ if [ $? -eq 0 ]; \ - then echo 'scikit-learn version 1.2.1 requirement met'; \ - else echo 'ERROR: Expected scikit-learn version is 1.2.1, check base images for scikit-learn version' && \ + then echo 'scikit-learn version 1.4.2 requirement met'; \ + else echo 'ERROR: Expected scikit-learn version is 1.4.2, check base images for scikit-learn version' && \ exit 1; fi RUN pip install --upgrade --no-cache --no-deps sagemaker-scikit-learn-extension==2.5.0 diff --git a/docker/1.2-1-1/extension/README.md b/docker/1.4-2/extension/README.md similarity index 100% rename from docker/1.2-1-1/extension/README.md rename to docker/1.4-2/extension/README.md diff --git a/docker/1.2-1-1/final/Dockerfile.cpu b/docker/1.4-2/final/Dockerfile.cpu similarity index 66% rename from docker/1.2-1-1/final/Dockerfile.cpu rename to docker/1.4-2/final/Dockerfile.cpu index d6bb03e9..bd7148c9 100644 --- a/docker/1.2-1-1/final/Dockerfile.cpu +++ b/docker/1.4-2/final/Dockerfile.cpu @@ -1,25 +1,19 @@ -FROM sklearn-base:1.2-1-1 -ENV SAGEMAKER_SKLEARN_VERSION 1.2-1-1 +FROM sklearn-base:1.4-2 +ENV SAGEMAKER_SKLEARN_VERSION=1.4-2 ENV PIP_ROOT_USER_ACTION=ignore LABEL 
com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true +# Install remaining packages via pip COPY requirements.txt /requirements.txt -RUN python -m pip install -r /requirements.txt && \ +RUN uv pip install --system -r /requirements.txt && \ rm /requirements.txt # Fix Python 3.10 compatibility for sagemaker-containers -RUN python3 -c "import sys; sys.path.insert(0, '/miniconda3/lib/python3.10/site-packages'); \ - import sagemaker_containers._mapping as m; \ - import collections.abc; \ - setattr(collections, 'Mapping', collections.abc.Mapping); \ - exec(open('/miniconda3/lib/python3.10/site-packages/sagemaker_containers/_mapping.py').read().replace('collections.Mapping', 'collections.abc.Mapping'))" || \ - sed -i 's/collections\.Mapping/collections.abc.Mapping/g' /miniconda3/lib/python3.10/site-packages/sagemaker_containers/_mapping.py +RUN python3 -c "import sys; import os; site_packages = '/usr/local/lib/python3.10/dist-packages'; mapping_file = os.path.join(site_packages, 'sagemaker_containers/_mapping.py'); exec('if os.path.exists(mapping_file):\\n with open(mapping_file, \"r\") as f:\\n content = f.read()\\n content = content.replace(\"collections.Mapping\", \"collections.abc.Mapping\")\\n with open(mapping_file, \"w\") as f:\\n f.write(content)')" COPY dist/sagemaker_sklearn_container-2.0-py3-none-any.whl /sagemaker_sklearn_container-2.0-py3-none-any.whl -RUN rm /miniconda3/lib/python3.10/site-packages/**/REQUESTED && \ - rm /miniconda3/lib/python3.10/site-packages/**/direct_url.json -RUN python3 -m pip install --no-cache /sagemaker_sklearn_container-2.0-py3-none-any.whl && \ +RUN uv pip install --system --no-cache /sagemaker_sklearn_container-2.0-py3-none-any.whl && \ rm /sagemaker_sklearn_container-2.0-py3-none-any.whl ENV SAGEMAKER_TRAINING_MODULE sagemaker_sklearn_container.training:main diff --git a/docker/1.2-1-1/resources/libffi7_3.3-6_arm64.deb b/docker/1.4-2/resources/libffi7_3.3-6_arm64.deb similarity index 100% rename from 
docker/1.2-1-1/resources/libffi7_3.3-6_arm64.deb rename to docker/1.4-2/resources/libffi7_3.3-6_arm64.deb diff --git a/docker/1.2-1-1/resources/mms/ExecutionParameters.java b/docker/1.4-2/resources/mms/ExecutionParameters.java similarity index 100% rename from docker/1.2-1-1/resources/mms/ExecutionParameters.java rename to docker/1.4-2/resources/mms/ExecutionParameters.java diff --git a/docker/1.2-1-1/resources/mms/config.properties.tmp b/docker/1.4-2/resources/mms/config.properties.tmp similarity index 100% rename from docker/1.2-1-1/resources/mms/config.properties.tmp rename to docker/1.4-2/resources/mms/config.properties.tmp diff --git a/docker/1.2-1-1/resources/mms/endpoints-1.0.jar b/docker/1.4-2/resources/mms/endpoints-1.0.jar similarity index 100% rename from docker/1.2-1-1/resources/mms/endpoints-1.0.jar rename to docker/1.4-2/resources/mms/endpoints-1.0.jar diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..af835272 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,34 @@ +[project] +name = "sagemaker-sklearn-container" +version = "2.0" +description = "SageMaker Scikit-learn Container" +requires-python = "==3.10.*" +license = "Apache-2.0" +authors = [{name = "Amazon Web Services"}] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", +] +dynamic = ["readme", "dependencies", "optional-dependencies"] + +[project.scripts] +serve = "sagemaker_sklearn_container.serving:serving_entrypoint" + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.dynamic] +readme = {file = ["README.rst"]} +dependencies = {file = ["requirements.txt"]} +optional-dependencies.test = {file = ["test-requirements.txt"]} + +[tool.setuptools.packages.find] +where = ["src"] +exclude = ["test*"] + +[tool.setuptools.package-dir] +"" = "src" diff 
--git a/requirements.txt b/requirements.txt index 34de7e78..bcc8bc22 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,29 +1,30 @@ +numpy==2.1.0 +scikit-learn==1.4.2 +scipy>=1.9.0 +pandas>=2.0.0 +pyarrow==17.0.0 boto3==1.28.57 botocore>=1.31.57,<1.32.0 +certifi cryptography Flask==1.1.1 -itsdangerous==2.0.1 +gevent==23.9.1 gunicorn==23.0.0 +itsdangerous==2.0.1 +jinja2<3.0 +MarkupSafe<2.0 model-archiver==1.0.3 multi-model-server==1.1.1 -pandas==1.5.3 protobuf==3.20.2 psutil==5.7.2 -python-dateutil==2.8.1 +python-dateutil +PyYAML==6.0.1 retrying==1.3.3 sagemaker-containers==2.8.6.post2 sagemaker-inference==1.2.0 sagemaker-training==4.8.0 -scikit-learn==1.2.1 -scipy==1.9.3 -urllib3==1.26.17 +setuptools six==1.15.0 -jinja2==3.0.3 -MarkupSafe==2.1.1 -numpy==1.24.1 -gevent==23.9.1 +urllib3==1.26.17 Werkzeug==2.0.3 -setuptools -wheel -certifi -PyYAML==6.0.1 \ No newline at end of file +wheel==0.45.1 diff --git a/setup.py b/setup.py index 3f4e84cc..5be88421 100644 --- a/setup.py +++ b/setup.py @@ -47,5 +47,5 @@ def read(fname): 'console_scripts': 'serve=sagemaker_sklearn_container.serving:serving_entrypoint' }, - python_requires='>=3.6', + python_requires='>=3.10', ) diff --git a/test-requirements.txt b/test-requirements.txt index 8e94ffe5..c79be276 100755 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,14 +1,19 @@ -Flask -PyYAML boto3>=1.24.17 coverage flake8 +Flask mock +numpy==2.1.0 +pandas +pyarrow==17.0.0 +pyOpenSSL==23.1.0 pytest pytest-cov pytest-xdist -python-dateutil==2.8.1 +python-dateutil>=2.8.2 PyYAML requests>=2.23.0 sagemaker>=1.3.0,<2 -tox \ No newline at end of file +scikit-learn==1.4.2 +scipy>=1.9.0 +tox diff --git a/test/conftest.py b/test/conftest.py index 4b46dde4..2c7708d4 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -43,7 +43,7 @@ def pytest_addoption(parser): parser.addoption('--install-container-support', '-C', action='store_true') parser.addoption('--docker-base-name', default='sk-learn') 
parser.addoption('--region', default='us-west-2') - parser.addoption('--framework-version', default='1.2.1') + parser.addoption('--framework-version', default='1.4.2') parser.addoption('--py-version', choices=['2', '3'], default=str(sys.version_info.major)) parser.addoption('--processor', choices=['cpu'], default='cpu') # If not specified, will default to {framework-version}-{processor}-py{py-version} diff --git a/test/integration/test_multiple_model_endpoint.py b/test/integration/test_multiple_model_endpoint.py index f6acf79f..ba6b417d 100644 --- a/test/integration/test_multiple_model_endpoint.py +++ b/test/integration/test_multiple_model_endpoint.py @@ -121,6 +121,27 @@ def make_unload_model_request(model_name): return response.status_code, response.content.decode(encodings.utf_8.getregentry().name) +@pytest.fixture(autouse=True) +def cleanup_models(): + """Cleanup fixture to unload all models between tests""" + yield # Run the test + # Cleanup after test + try: + code, res = make_list_model_request() + if code == 200: + models_data = json.loads(res) if isinstance(res, str) else res + models = models_data.get('models', []) + for model in models: + model_name = model.get('modelName') + if model_name: + try: + make_unload_model_request(model_name) + except Exception: + pass # Ignore individual unload errors + except Exception: + pass # Ignore cleanup errors + + def test_ping(): res = requests.get(PING_URL) assert res.status_code == 200 diff --git a/test/resources/models/pickled-model-1/sklearn-model b/test/resources/models/pickled-model-1/sklearn-model index b5bf1770..d775e119 100644 Binary files a/test/resources/models/pickled-model-1/sklearn-model and b/test/resources/models/pickled-model-1/sklearn-model differ diff --git a/test/resources/models/pickled-model-2/sklearn-model b/test/resources/models/pickled-model-2/sklearn-model index bcea8bbe..d775e119 100644 Binary files a/test/resources/models/pickled-model-2/sklearn-model and 
b/test/resources/models/pickled-model-2/sklearn-model differ diff --git a/test/unit/test_modules.py b/test/unit/test_modules.py index 41acffa8..f56053f9 100644 --- a/test/unit/test_modules.py +++ b/test/unit/test_modules.py @@ -4,7 +4,7 @@ def test_pandas_version(): import pandas as pd major, minor, patch = pd.__version__.split('.') - assert major == '1' + assert major == '2' def test_pyarrow_to_parquet_conversion_regression_issue_106(): diff --git a/tox.ini b/tox.ini index b268e6d2..1613ed13 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ max-line-length = 120 [testenv] deps = - sklearn1.2: scikit-learn==1.2.1 + sklearn1.4.2: scikit-learn==1.4.2 -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt conda_deps=