Add V2 data plane support for alibi detect server
ukclivecox authored and seldondev committed Jan 12, 2021
1 parent 4a960a0 commit 0e9eee5
Showing 17 changed files with 4,090 additions and 58 deletions.
32 changes: 16 additions & 16 deletions components/alibi-detect-server/Makefile
@@ -1,6 +1,6 @@
SHELL := /bin/bash
VERSION := $(shell cat ../../version.txt)
REPO := seldonio
REPO=seldonio
IMAGE=alibi-detect-server

.PHONY: install_dev
@@ -23,11 +23,11 @@ lint:
# Local Run
#

run-outlier-detector-cifar10:
python -m adserver --model_name cifar10od --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.outlier --storage_uri gs://seldon-models/alibi-detect/cd/ks/cifar10-0_4_4 --event_source http://localhost:8080 OutlierDetector
run-outlier-detector-tensorflow:
python -m adserver --model_name cifar10od --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.outlier --storage_uri gs://seldon-models/alibi-detect/od/OutlierVAE/cifar10 --event_source http://localhost:8080 OutlierDetector

run-drift-detector-imdb:
	python -m adserver --model_name imdbcd --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.drift --storage_uri gs://seldon-models/alibi-detect/cd/ks/imdb-0_4_4 --event_source http://localhost:8080 DriftDetector --drift_batch_size=1
run-outlier-detector-v2:
python -m adserver --model_name cifar10od --http_port 8080 --protocol v2.http --event_type org.kubeflow.serving.inference.outlier --storage_uri gs://seldon-models/alibi-detect/od/OutlierVAE/cifar10 --event_source http://localhost:8080 OutlierDetector

run-metrics-server:
SELDON_DEPLOYMENT_ID="sdepname" PREDICTIVE_UNIT_ID="modelname" PREDICTIVE_UNIT_IMAGE="adserver:0.1" PREDICTOR_ID="pred" \
@@ -37,28 +37,28 @@ run-metrics-server:
# Docker Run
#

docker-run-outlier-detector:
docker-run-outlier-detector-tensorflow:
docker run --name cifar10od -it --rm -p 8080:8080 ${REPO}/${IMAGE}:${VERSION} --model_name cifar10od --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.outlier --storage_uri gs://seldon-models/alibi-detect/od/OutlierVAE/cifar10 --event_source http://localhost:8080 OutlierDetector

docker-run-drift-detector-cifar10:
docker run --name cifar10cd -it --rm -p 8080:8080 ${REPO}/${IMAGE}:${VERSION} --model_name cifar10cd --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.drift --storage_uri gs://seldon-models/alibi-detect/cd/ks/cifar10-0_4_4 --event_source http://localhost:8080 DriftDetector --drift_batch_size=2

docker-run-drift-detector-imdb:
docker run --name cifar10cd -it --rm -p 8080:8080 ${REPO}/${IMAGE}:${VERSION} --model_name imdbcd --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.drift --storage_uri gs://seldon-models/alibi-detect/cd/ks/imdb-0_4_4 --event_source http://localhost:8080 DriftDetector --drift_batch_size=2
docker-run-drift-detector-tensorflow:
docker run --name cifar10cd -it --rm -p 8080:8080 ${REPO}/${IMAGE}:${VERSION} --model_name cifar10cd --http_port 8080 --protocol tensorflow.http --event_type org.kubeflow.serving.inference.drift --storage_uri gs://seldon-models/alibi-detect/cd/ks/cifar10-0_4_3 --event_source http://localhost:8080 DriftDetector --drift_batch_size=2


#
# Test curls
#

curl-detector-cifar10:
curl-detector-tensorflow:
curl -v localhost:8080/ -d @./cifar10.json -H "ce-namespace: default" -H "ce-modelid: cifar10" -H "ce-type: io.seldon.serving.inference.request" -H "ce-id: 1234" -H "ce-source: localhost" -H "ce-specversion: 1.0"

curl-detector-imdb:
curl -v localhost:8080/ -d @./imdb.json -H "ce-namespace: default" -H "ce-modelid: imdb" -H "ce-type: io.seldon.serving.inference.request" -H "ce-id: 1234" -H "ce-source: localhost" -H "ce-specversion: 1.0"
curl-detector-v2:
curl -v localhost:8080/ -d @./cifar10-v2.json -H "ce-namespace: default" -H "ce-modelid: cifar10" -H "ce-type: io.seldon.serving.inference.request" -H "ce-id: 1234" -H "ce-source: localhost" -H "ce-specversion: 1.0"

curl-detector-v2-outlier:
curl -v localhost:8080/ -d @./cifar10-v2-outlier.json -H "ce-namespace: default" -H "ce-modelid: cifar10" -H "ce-type: io.seldon.serving.inference.request" -H "ce-id: 1234" -H "ce-source: localhost" -H "ce-specversion: 1.0"

curl-outlier-detector-scores:
curl -v localhost:8080/ -d @./input.json -H "Alibi-Detect-Return-Feature-Score: true" -H "Alibi-Detect-Return-Instance-Score: true"
curl-tensorflow-outlier-detector-scores:
curl -v localhost:8080/ -d @./cifar10.json -H "Alibi-Detect-Return-Feature-Score: true" -H "Alibi-Detect-Return-Instance-Score: true"

curl-metrics-server:
curl -v -X POST -H 'Content-Type: application/json' \
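
The payload in cifar10-v2.json is not part of this diff; the sketch below is only a guess at the general shape of a V2-protocol request body (a single entry under "inputs" carrying name, datatype, shape and a flat data list, which is what the new handler expects), posted with the same CloudEvents headers the curl-detector-v2 target uses. Shape and values are illustrative, not the real file contents.

import json

import requests  # third-party HTTP client, assumed available

# Hypothetical V2 payload; the real cifar10-v2.json will differ in shape and values.
payload = {
    "inputs": [
        {
            "name": "input_1",
            "datatype": "FP32",
            "shape": [1, 3],
            "data": [0.1, 0.2, 0.3],
        }
    ]
}

headers = {
    "ce-namespace": "default",
    "ce-modelid": "cifar10",
    "ce-type": "io.seldon.serving.inference.request",
    "ce-id": "1234",
    "ce-source": "localhost",
    "ce-specversion": "1.0",
}

resp = requests.post("http://localhost:8080/", data=json.dumps(payload), headers=headers)
print(resp.status_code, resp.text)
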
@@ -5,6 +5,7 @@ class Protocol(Enum):
tensorflow_http = "tensorflow.http"
seldon_http = "seldon.http"
seldonfeedback_http = "seldonfeedback.http"
v2_http = "v2.http"

def __str__(self):
return self.value
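
The new member is what the `--protocol v2.http` flag in the Makefile targets resolves to; a minimal sketch (the enum's import path is assumed from the package layout):

from adserver.protocols import Protocol  # assumed module for the enum shown above

assert Protocol("v2.http") is Protocol.v2_http
assert str(Protocol.v2_http) == "v2.http"
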
70 changes: 70 additions & 0 deletions components/alibi-detect-server/adserver/protocols/v2.py
@@ -0,0 +1,70 @@
from http import HTTPStatus
from typing import Dict, List

import numpy as np
import tornado
from adserver.protocols.request_handler import (
RequestHandler,
) # pylint: disable=no-name-in-module

def _create_np_from_v2(data: list, ty: str, shape: list) -> np.ndarray:
    npty = np.float64
    if ty == "BOOL":
        npty = np.bool_
elif ty == "UINT8":
npty = np.uint8
elif ty == "UINT16":
npty = np.uint16
elif ty == "UINT32":
npty = np.uint32
elif ty == "UINT64":
npty = np.uint64
elif ty == "INT8":
npty = np.int8
elif ty == "INT16":
npty = np.int16
elif ty == "INT32":
npty = np.int32
elif ty == "INT64":
npty = np.int64
elif ty == "FP16":
npty = np.float32
elif ty == "FP32":
npty = np.float32
elif ty == "FP64":
npty = np.float64
else:
raise ValueError(f"V2 unknown type or type that can't be coerced {ty}")

arr = np.array(data, dtype=npty)
arr.shape = tuple(shape)
return arr


class V2RequestHandler(RequestHandler):
def __init__(self, request: Dict): # pylint: disable=useless-super-delegation
super().__init__(request)

def validate(self):
if not "inputs" in self.request:
raise tornado.web.HTTPError(
status_code=HTTPStatus.BAD_REQUEST,
reason='Expected key "data" in request body',
)
# assumes single input
inputs = self.request["inputs"][0]
data_type = inputs["datatype"]

if data_type == "BYTES":
raise tornado.web.HTTPError(
status_code=HTTPStatus.BAD_REQUEST,
                reason="v2 protocol BYTES data cannot currently be handled",
)

def extract_request(self) -> List:
inputs = self.request["inputs"][0]
data_type = inputs["datatype"]
shape = inputs["shape"]
data = inputs["data"]
arr = _create_np_from_v2(data, data_type, shape)
return arr.tolist()
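
A short usage sketch of the new handler; the request dict mirrors the test payload added further down, with an extra call showing the dtype mapping on its own (names and values are illustrative):

from adserver.protocols.v2 import V2RequestHandler, _create_np_from_v2

request = {
    "inputs": [
        {"name": "input_1", "datatype": "FP32", "shape": [2, 2], "data": [1, 2, 3, 4]}
    ]
}

handler = V2RequestHandler(request)
handler.validate()                 # raises an HTTP 400 if "inputs" is missing or datatype is BYTES
instances = handler.extract_request()
print(instances)                   # [[1.0, 2.0], [3.0, 4.0]] -- flat data reshaped to (2, 2) as float32

# The dtype mapping can also be exercised directly:
arr = _create_np_from_v2([1, 0, 1], "BOOL", [3])
print(arr.dtype, arr.shape)        # bool (3,)
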
3 changes: 3 additions & 0 deletions components/alibi-detect-server/adserver/server.py
@@ -14,6 +14,7 @@
from adserver.protocols.seldon_http import SeldonRequestHandler
from adserver.protocols.seldonfeedback_http import SeldonFeedbackRequestHandler
from adserver.protocols.tensorflow_http import TensorflowRequestHandler
from adserver.protocols.v2 import V2RequestHandler
from cloudevents.sdk import converters
from cloudevents.sdk import marshaller
from cloudevents.sdk.event import v1
@@ -143,6 +144,8 @@ def get_request_handler(protocol, request: Dict) -> RequestHandler:
return SeldonRequestHandler(request)
elif protocol == Protocol.seldonfeedback_http:
return SeldonFeedbackRequestHandler(request)
elif protocol == Protocol.v2_http:
return V2RequestHandler(request)


def sendCloudEvent(event: v1.Event, url: str):
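
The new branch in get_request_handler can be exercised on its own; a minimal sketch (import paths assumed from the file layout above):

from adserver.protocols import Protocol          # assumed location of the Protocol enum
from adserver.server import get_request_handler  # function extended in this file

request = {"inputs": [{"name": "input_1", "datatype": "FP32", "shape": [1, 3], "data": [1, 2, 3]}]}
handler = get_request_handler(Protocol.v2_http, request)  # returns a V2RequestHandler
handler.validate()
print(handler.extract_request())  # [[1.0, 2.0, 3.0]]
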
43 changes: 42 additions & 1 deletion components/alibi-detect-server/adserver/tests/test_server.py
@@ -38,7 +38,7 @@ def process_event(self, inputs: List, headers: Dict) -> Dict:
return DummyModel.getResponse()


class TestModel(AsyncHTTPTestCase):
class TestSeldonHttpModel(AsyncHTTPTestCase):
def setupEnv(self):
self.replyUrl = "http://reply-location"
self.eventSource = "x.y.z"
@@ -76,6 +76,47 @@ def test_basic(self):
self.assertEqual(headers["ce-type"], self.eventType)


class TestV2HttpModel(AsyncHTTPTestCase):
def setupEnv(self):
self.replyUrl = "http://reply-location"
self.eventSource = "x.y.z"
self.eventType = "a.b.c"

def get_app(self):
self.setupEnv()
server = CEServer(
Protocol.v2_http, self.eventType, self.eventSource, 9000, self.replyUrl
)
model = DummyModel("name")
server.register_model(model)
return server.create_application()

def test_basic(self):
data = {"inputs": [{"name":"input_1",
"datatype": "FP32",
"shape": [1, 3],
"data": [1, 2, 3]}]}
dataStr = json.dumps(data)
with requests_mock.Mocker() as m:
m.post(self.replyUrl, text="resp")

response = self.fetch(
"/",
method="POST",
body=dataStr,
                headers={customHeaderKey: customHeaderVal, "ce-source": "a.b.c", "ce-type": "d.e.f", "ce-id": "1234", "ce-specversion": "1.0"},
)
self.assertEqual(response.code, 200)
expectedResponse = json.dumps(DummyModel.getResponse())
self.assertEqual(response.body.decode("utf-8"), expectedResponse)
self.assertEqual(
m.request_history[0].json(), json.dumps(DummyModel.getResponse())
)
headers: Dict = m.request_history[0]._request.headers
self.assertEqual(headers["ce-source"], self.eventSource)
self.assertEqual(headers["ce-type"], self.eventType)


class TestModelNoResponse(AsyncHTTPTestCase):
def setupEnv(self):
self.replyUrl = "http://reply-location"