Skip to content
Permalink
Browse files

Delete bento services after complete e2e tests (#503)

* delete bentoservices after e2e tests

* update base on comment

* refactor e2e
  • Loading branch information
yubozhao committed Feb 4, 2020
1 parent c2c2dc6 commit d82c2f1e368aaec43103733d449b7493cc2cf4ac
@@ -25,10 +25,11 @@ include bentoml/alembic.ini
# include ".conf" file
include bentoml/deployment/sagemaker/sagemaker_nginx.conf

# Don't include examples, tests directory
# Don't include examples, tests, docs and scripts directory
prune examples
prune tests
prune docs
prune scripts

# Patterns to exclude from any directory
global-exclude *~
@@ -1,18 +1,32 @@
#!/usr/bin/env python

import subprocess
import logging
import uuid
import sys

import requests
import json
from sklearn import svm, datasets

from bentoml import BentoService, load, api, env, artifacts
from bentoml.artifact import PickleArtifact
from bentoml.handlers import DataframeHandler

try:
from scripts.e2e_tests.aws_lambda.utils import (
test_deployment_with_sample_data,
run_lambda_create_or_update_command,
)
except ModuleNotFoundError:
# Put dummy lambda here to stop deployment complains
run_lambda_create_or_update_command = lambda x: None # noqa
test_deployment_with_sample_data = lambda x, y, z: None # noqa

try:
from scripts.e2e_tests.cli_operations import delete_deployment, delete_bento
except ModuleNotFoundError:
# Put dummy lambda here to stop deployment complains
delete_deployment = lambda x, y: None # noqa
delete_bento = lambda x: None # noqa

logger = logging.getLogger('bentoml.test')

@@ -32,27 +46,24 @@ def predict(self, df):

args = sys.argv
bento_name = None
if len(args) > 1:
bento_name = args[1]
if bento_name is None:
logger.info('Training iris classifier with sklearn..')
clf = svm.SVC(gamma='scale')
iris = datasets.load_iris()
X, y = iris.data, iris.target
clf.fit(X, y)

logger.info('Creating iris classifier BentoService bundle..')
iris_clf_service = IrisClassifier()
iris_clf_service.pack('clf', clf)
saved_path = iris_clf_service.save()

loaded_service = load(saved_path)
sample_data = X[0:1]

logger.info(
'Result from sample data is: %s', str(loaded_service.predict(sample_data))
)
bento_name = f'{loaded_service.name}:{loaded_service.version}'
logger.info('Training iris classifier with sklearn..')
clf = svm.SVC(gamma='scale')
iris = datasets.load_iris()
X, y = iris.data, iris.target
clf.fit(X, y)

logger.info('Creating iris classifier BentoService bundle..')
iris_clf_service = IrisClassifier()
iris_clf_service.pack('clf', clf)
saved_path = iris_clf_service.save()

loaded_service = load(saved_path)
sample_data = X[0:1]

logger.info(
'Result from sample data is: %s', str(loaded_service.predict(sample_data))
)
bento_name = f'{loaded_service.name}:{loaded_service.version}'
create_deployment_command = [
'bentoml',
'lambda',
@@ -64,61 +75,16 @@ def predict(self, df):
'us-west-2',
'--verbose',
]
logger.info(
f"Running bentoml deploy command: {' '.join(create_deployment_command)}"
deployment_failed, deployment_endpoint = run_lambda_create_or_update_command(
create_deployment_command
)
with subprocess.Popen(
create_deployment_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) as proc:
create_deployment_stdout = proc.stdout.read().decode('utf-8')
logger.info('Finish deploying to AWS Lambda')
logger.info(create_deployment_stdout)
if create_deployment_stdout.startswith('Failed to create AWS Lambda deployment'):
deployment_failed = True
create_deployment_output_list = create_deployment_stdout.split('\n')
deployment_endpoint = ''
for index, message in enumerate(create_deployment_output_list):
if '"endpoints": [' in message:
deployment_endpoint = (
create_deployment_output_list[index + 1].strip().replace('"', '')
)

if not deployment_failed:
logger.info('Test deployment with sample request')
try:
request_result = requests.post(
deployment_endpoint,
data=json.dumps(sample_data.tolist()),
headers={'Content-Type': 'application/json'},
)
if request_result.status_code != 200:
deployment_failed = True
if request_result.content.decode('utf-8') != '[0]':
logger.info(
'Test request failed. {}:{}'.format(
request_result.status_code,
request_result.content.decode('utf-8'),
)
)
deployment_failed = True
except Exception as e:
logger.error(str(e))
deployment_failed = True

logger.info('Delete test deployment with BentoML CLI')
delete_deployment_command = [
'bentoml',
'lambda',
'delete',
deployment_name,
'--force',
]
logger.info(f'Delete command: {delete_deployment_command}')
with subprocess.Popen(
delete_deployment_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) as proc:
delete_deployment_stdout = proc.stdout.read().decode('utf-8')
logger.info(delete_deployment_stdout)
if not deployment_failed and deployment_endpoint:
deployment_failed = test_deployment_with_sample_data(
deployment_endpoint, '[0]', json.dumps(sample_data.tolist())
)

delete_deployment('lambda', deployment_name)
delete_bento(bento_name)

logger.info('Finished')
if deployment_failed:
@@ -1,15 +1,30 @@
#!/usr/bin/env python

import subprocess
import logging
import uuid
import sys

import requests

from bentoml import BentoService, load, api
from bentoml.handlers import JsonHandler

try:
from scripts.e2e_tests.aws_lambda.utils import (
test_deployment_with_sample_data,
run_lambda_create_or_update_command,
)
except ModuleNotFoundError:
# Put dummy lambda here to stop deployment complains
run_lambda_create_or_update_command = lambda x: None # noqa
test_deployment_with_sample_data = lambda x, y, z: None # noqa

try:
from scripts.e2e_tests.cli_operations import delete_deployment, delete_bento
except ModuleNotFoundError:
# Put dummy lambda here to stop deployment complains
delete_deployment = lambda x, y: None # noqa
delete_bento = lambda x: None # noqa


logger = logging.getLogger('bentoml.test')

@@ -26,74 +41,6 @@ def predict(self, data):
return 'dog'


def run_lambda_create_or_update_command(deploy_command):
    """Run a `bentoml lambda deploy`/`update` command and report the outcome.

    Returns a ``(deployment_failed, deployment_endpoint)`` tuple: whether the
    CLI reported a failure, and the endpoint URL parsed from the command's
    output ('' when none was found).
    """
    logger.info(f"Running bentoml deploy command: {' '.join(deploy_command)}")
    failed = False
    endpoint = ''

    with subprocess.Popen(
        deploy_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        output = proc.stdout.read().decode('utf-8')
    logger.info('Finish deploying to AWS Lambda')
    logger.info(output)

    # The CLI announces failures with a fixed banner at the start of stdout.
    if output.startswith(
        ('Failed to create deployment', 'Failed to update deployment')
    ):
        failed = True

    # The endpoint URL sits on the line right after the '"endpoints": [' marker.
    lines = output.split('\n')
    for idx, line in enumerate(lines):
        if '"endpoints": [' in line:
            endpoint = lines[idx + 1].strip().replace('"', '')
    return failed, endpoint


def test_deployment_with_sample_data(
    deployment_endpoint, expect_result, sample_data=None
):
    """POST *sample_data* to *deployment_endpoint* and check the response.

    Returns True when the request raised, returned a non-200 status, or the
    response body differs from *expect_result*; False otherwise.
    """
    logger.info('Test deployment with sample request')
    payload = sample_data or '"{}"'  # default body: an empty JSON string
    failed = False
    try:
        response = requests.post(
            deployment_endpoint,
            data=payload,
            headers={'Content-Type': 'application/json'},
        )
        body = response.content.decode('utf-8')
        if response.status_code != 200:
            failed = True
        if body != expect_result:
            logger.info(f'Test request failed. {response.status_code}:{body}')
            failed = True
    except Exception as e:
        logger.error(str(e))
        failed = True
    return failed


def delete_deployment(deployment_name):
    """Tear down the test deployment via `bentoml lambda delete --force`."""
    logger.info('Delete test deployment with BentoML CLI')
    command = ['bentoml', 'lambda', 'delete', deployment_name, '--force']
    logger.info(f'Delete command: {command}')
    with subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        output = proc.stdout.read().decode('utf-8')
    logger.info(output)


if __name__ == '__main__':
deployment_failed = False
random_hash = uuid.uuid4().hex[:6]
@@ -149,7 +96,10 @@ def delete_deployment(deployment_name):
deployment_failed = True
logger.debug('Update Lambda failed')

delete_deployment(deployment_name)
delete_deployment('lambda', deployment_name)
delete_bento(bento_name)
if updated_bento_name:
delete_bento(updated_bento_name)

logger.info('Finished')
if deployment_failed:
@@ -0,0 +1,57 @@
import logging
import subprocess

import requests

logger = logging.getLogger('bentoml.test')


def test_deployment_with_sample_data(
    deployment_endpoint, expect_result, sample_data=None
):
    """POST *sample_data* to the deployed endpoint and verify the response.

    Args:
        deployment_endpoint: URL of the deployed BentoService endpoint.
        expect_result: exact response body (str) the endpoint should return.
        sample_data: JSON request body; defaults to an empty JSON string.

    Returns:
        True when the request raised, returned a non-200 status, or the
        response body differed from ``expect_result``; False otherwise.
    """
    logger.info('Test deployment with sample request')
    sample_data = sample_data or '"{}"'
    deployment_failed = False
    try:
        request_result = requests.post(
            deployment_endpoint,
            data=sample_data,
            headers={'Content-Type': 'application/json'},
            # Fail fast instead of hanging the whole e2e run when the
            # endpoint never answers; the Timeout lands in the except below.
            timeout=60,
        )
        # Decode once and reuse for both the comparison and the log message.
        response_body = request_result.content.decode('utf-8')
        if request_result.status_code != 200:
            deployment_failed = True
        if response_body != expect_result:
            logger.info(
                'Test request failed. {}:{}'.format(
                    request_result.status_code, response_body,
                )
            )
            deployment_failed = True
    except Exception as e:
        logger.error(str(e))
        deployment_failed = True
    return deployment_failed


def run_lambda_create_or_update_command(deploy_command):
    """Run a `bentoml lambda deploy`/`update` CLI command and parse its output.

    Args:
        deploy_command: argv-style list handed to subprocess (no shell).

    Returns:
        ``(deployment_failed, deployment_endpoint)`` — whether the CLI
        reported a failure, and the endpoint URL parsed from its output
        ('' when none was found).
    """
    # getLogger returns the process-wide cached instance, so this is the
    # same 'bentoml.test' logger the rest of the suite writes to.
    log = logging.getLogger('bentoml.test')
    log.info(f"Running bentoml deploy command: {' '.join(deploy_command)}")
    deployment_failed = False
    deployment_endpoint = ''

    with subprocess.Popen(
        deploy_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        deploy_command_stdout = proc.stdout.read().decode('utf-8')
    log.info('Finish deploying to AWS Lambda')
    log.info(deploy_command_stdout)

    # The CLI announces failures with a fixed banner at the start of stdout.
    if deploy_command_stdout.startswith(
        ('Failed to create deployment', 'Failed to update deployment')
    ):
        deployment_failed = True

    # The endpoint URL is printed on the line following the '"endpoints": ['
    # marker. Guard the look-ahead so truncated output (marker on the last
    # line) can no longer raise IndexError.
    deploy_command_stdout_list = deploy_command_stdout.split('\n')
    for index, message in enumerate(deploy_command_stdout_list):
        if (
            '"endpoints": [' in message
            and index + 1 < len(deploy_command_stdout_list)
        ):
            deployment_endpoint = (
                deploy_command_stdout_list[index + 1].strip().replace('"', '')
            )
    return deployment_failed, deployment_endpoint

0 comments on commit d82c2f1

Please sign in to comment.
You can’t perform that action at this time.