Fix CVEs for Alibi images (#4612)
Adrian Gonzalez-Martin committed Jan 30, 2023
1 parent 20fe3ed commit 24467f0
Showing 21 changed files with 1,802 additions and 1,473 deletions.
11 changes: 9 additions & 2 deletions .github/workflows/alibiexplainer_tests.yml
@@ -24,10 +24,17 @@ jobs:
   python-tests:

     runs-on: ubuntu-latest
-    container: seldonio/python-builder:0.6
-
     steps:
       - uses: actions/checkout@v2
+      - name: Set up Python 3.7
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.7
+      - name: Install Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.1.15
+          virtualenvs-create: false
       - name: test-python
         run: |
           pip install --upgrade pip setuptools
8 changes: 4 additions & 4 deletions components/alibi-detect-server/Dockerfile
@@ -23,10 +23,10 @@ RUN microdnf update -y && \
     gcc-c++

 # Install Rclone Binary to be present in the image
-RUN wget https://downloads.rclone.org/v1.55.1/rclone-v1.55.1-linux-amd64.zip && \
-    unzip rclone-v1.55.1-linux-amd64.zip && \
-    mv rclone-v1.55.1-linux-amd64/rclone /usr/bin/rclone && \
-    rm -rf rclone-v1.55.1-linux-amd64.zip rclone-v1.55.1-linux-amd64
+RUN wget https://downloads.rclone.org/v1.61.1/rclone-v1.61.1-linux-amd64.zip && \
+    unzip rclone-v1.61.1-linux-amd64.zip && \
+    mv rclone-v1.61.1-linux-amd64/rclone /usr/bin/rclone && \
+    rm -rf rclone-v1.61.1-linux-amd64.zip rclone-v1.61.1-linux-amd64

 # Install Python / Conda
 # Note that we need to force Conda to use the system's std-c++ library, as
7 changes: 3 additions & 4 deletions components/alibi-detect-server/adserver/__main__.py
@@ -2,9 +2,9 @@

 import tensorflow as tf

-gpus = tf.config.experimental.list_physical_devices('GPU')
+gpus = tf.config.experimental.list_physical_devices("GPU")
 for gpu in gpus:
-  tf.config.experimental.set_memory_growth(gpu, True)
+    tf.config.experimental.set_memory_growth(gpu, True)

 tf.keras.backend.clear_session()

@@ -18,8 +18,7 @@
 from adserver.server import CEServer
 from adserver.protocols import Protocol
 from adserver.server import DEFAULT_HTTP_PORT
-from alibi_detect.utils.saving import Data
-
+from adserver.base import Data


 class AlibiDetectMethod(Enum):
6 changes: 3 additions & 3 deletions components/alibi-detect-server/adserver/ad_model.py
@@ -5,8 +5,8 @@
 from adserver.constants import HEADER_RETURN_INSTANCE_SCORE
 from .numpy_encoder import NumpyEncoder
 from adserver.protocols.util import read_inputs_as_numpy
-from alibi_detect.utils.saving import load_detector, Data
-from adserver.base import CEModel, ModelResponse
+from alibi_detect.utils.saving import load_detector
+from adserver.base import CEModel, ModelResponse, Data
 from adserver.base.storage import download_model


@@ -76,5 +76,5 @@ def process_event(self, inputs: Union[List, Dict], headers: Dict) -> ModelResponse:

         ad_preds = self.model.predict(X, return_instance_score=ret_instance_score)

-        data = json.loads(json.dumps(ad_preds, cls=NumpyEncoder))
+        data = json.loads(json.dumps(ad_preds, cls=NumpyEncoder))
         return ModelResponse(data=data, metrics=None)
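Note: the json.loads(json.dumps(ad_preds, cls=NumpyEncoder)) round-trip above is what converts numpy arrays and scalars in the detector output into JSON-safe Python values. A minimal sketch of such an encoder, for illustration only (the repository's adserver/numpy_encoder.py may differ):

import json

import numpy as np


class NumpyEncoder(json.JSONEncoder):
    """Sketch of a JSON encoder that copes with numpy arrays and scalars."""

    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.generic):
            return obj.item()
        return super().default(obj)


# Round-trip detector output into plain Python types, as in process_event above.
ad_preds = {"is_outlier": np.array([0, 1]), "instance_score": np.float64(0.42)}
data = json.loads(json.dumps(ad_preds, cls=NumpyEncoder))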
2 changes: 1 addition & 1 deletion components/alibi-detect-server/adserver/base/__init__.py
@@ -1,2 +1,2 @@
 from .model import CEModel, ModelResponse
-from .alibi_model import AlibiDetectModel
+from .alibi_model import AlibiDetectModel, Data
7 changes: 5 additions & 2 deletions components/alibi-detect-server/adserver/base/alibi_model.py
@@ -1,7 +1,10 @@
-from typing import Optional
+from typing import Optional, Union
 from adserver.base.model import CEModel
 from adserver.base.storage import download_model
-from alibi_detect.utils.saving import load_detector, Data
+from alibi_detect.base import ConfigurableDetector, Detector
+from alibi_detect.utils.saving import load_detector
+
+Data = Union[Detector, ConfigurableDetector]


 class AlibiDetectModel(CEModel): # pylint:disable=c-extension-no-member
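Note: the hunk above stops importing Data from alibi_detect.utils.saving and instead defines it locally as the union of the two detector base classes that alibi-detect exposes. A standalone sketch of how the alias is consumed for annotations (the function name and argument are illustrative, not part of this commit):

from typing import Union

from alibi_detect.base import ConfigurableDetector, Detector
from alibi_detect.utils.saving import load_detector

# Same union as in adserver/base/alibi_model.py above: a loaded detector is
# either a legacy Detector or a config-driven ConfigurableDetector.
Data = Union[Detector, ConfigurableDetector]


def load_any_detector(model_folder: str) -> Data:
    # load_detector returns whichever detector type was saved to disk; the
    # alias lets callers annotate the result without caring which one it is.
    detector: Data = load_detector(model_folder)
    return detector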
11 changes: 7 additions & 4 deletions components/alibi-detect-server/adserver/cd_model.py
@@ -4,12 +4,13 @@
 import numpy as np
 import os
 from .numpy_encoder import NumpyEncoder
-from adserver.base import AlibiDetectModel, ModelResponse
-from alibi_detect.utils.saving import load_detector, Data
+from adserver.base import AlibiDetectModel, ModelResponse, Data
+from alibi_detect.utils.saving import load_detector
 from adserver.constants import ENV_DRIFT_TYPE_FEATURE

 DRIFT_TYPE_FEATURE = os.environ.get(ENV_DRIFT_TYPE_FEATURE, "").upper() == "TRUE"

+
 def _append_drift_metrcs(metrics, drift, name):
     metric_found = drift.get(name)

@@ -58,7 +59,9 @@ def __init__(
         self.batch: Optional[np.ndarray] = None
         self.model: Data = model

-    def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Optional[ModelResponse]:
+    def process_event(
+        self, inputs: Union[List, Dict], headers: Dict
+    ) -> Optional[ModelResponse]:
         """
         Process the event and return Alibi Detect score
@@ -98,7 +101,7 @@ def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Optional[ModelResponse]:
                 self.drift_batch_size,
             )
             if DRIFT_TYPE_FEATURE:
-                cd_preds = self.model.predict(self.batch, drift_type='feature')
+                cd_preds = self.model.predict(self.batch, drift_type="feature")
             else:
                 cd_preds = self.model.predict(self.batch)

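Note: the DRIFT_TYPE_FEATURE branch above maps onto the drift_type argument of alibi-detect's drift detectors. A standalone sketch under assumed inputs (the KSDrift detector, env-var name, and data shapes are illustrative; the server reads its flag through the ENV_DRIFT_TYPE_FEATURE constant):

import os

import numpy as np
from alibi_detect.cd import KSDrift

# Hypothetical env-var name for this sketch only.
DRIFT_TYPE_FEATURE = os.environ.get("DRIFT_TYPE_FEATURE", "").upper() == "TRUE"

x_ref = np.random.randn(500, 32).astype(np.float32)  # reference data (illustrative)
batch = np.random.randn(100, 32).astype(np.float32)  # accumulated request batch

detector = KSDrift(x_ref, p_val=0.05)

if DRIFT_TYPE_FEATURE:
    # Per-feature Kolmogorov-Smirnov decisions, as in drift_type="feature" above.
    preds = detector.predict(batch, drift_type="feature")
else:
    # Single batch-level drift decision (the detector's default).
    preds = detector.predict(batch)

print(preds["data"]["is_drift"])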
8 changes: 5 additions & 3 deletions components/alibi-detect-server/adserver/od_model.py
@@ -5,8 +5,8 @@
 import numpy as np
 from .numpy_encoder import NumpyEncoder
 from adserver.protocols.util import read_inputs_as_numpy
-from adserver.base import CEModel, ModelResponse
-from alibi_detect.utils.saving import load_detector, Data
+from adserver.base import CEModel, ModelResponse, Data
+from alibi_detect.utils.saving import load_detector
 from adserver.base.storage import download_model
 from adserver.constants import (
     HEADER_RETURN_INSTANCE_SCORE,
@@ -66,7 +66,9 @@ def load(self):
         self.model: Data = load_detector(model_folder)
         self.ready = True

-    def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Optional[ModelResponse]:
+    def process_event(
+        self, inputs: Union[List, Dict], headers: Dict
+    ) -> Optional[ModelResponse]:
         """
         Process the event and return Alibi Detect score
2 changes: 1 addition & 1 deletion (concept-drift model test, file path not shown)
@@ -60,7 +60,7 @@ def test_basic(self):
         model = DummyCDModel()
         ad_model = AlibiDetectConceptDriftModel(
             "imdb_text_drift",
-            "gs://seldon-models/alibi-detect/cd/ks/imdb-0_6_2",
+            "gs://seldon-models/alibi-detect/cd/ks/imdb-0_10_4",
             drift_batch_size=2,
         )
         req = [
