Auto formatting with python/black #157

Merged
merged 11 commits on Jun 6, 2019

fixes

parano committed Jun 6, 2019
commit 0fa3a60f7a52fc86a17c387f9d9957a2dabd3458
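For orientation, a hedged sketch of the tooling this PR implies; the exact commands and target directories are assumptions, not taken from the PR. It also shows why several long template strings and log-format lines in the hunks below gain "# noqa: E501": black leaves lines it cannot safely wrap, so flake8's line-length check is silenced on them explicitly.

# Hedged sketch (not from this PR): run the formatter, then the linter.
import subprocess

subprocess.run(["black", "bentoml", "tests"], check=True)   # rewrite files in black's style
subprocess.run(["flake8", "bentoml", "tests"], check=True)  # lines marked "# noqa: E501" no longer fail the E501 length check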
@@ -16,6 +16,7 @@
from __future__ import division
from __future__ import print_function

+
BENTO_MODEL_SETUP_PY_TEMPLATE = """\
import os
import pip
@@ -95,7 +96,7 @@ def _parse_requirements(file_path):
# Run Gunicorn server with path to module.
CMD ["bentoml serve-gunicorn /bento"]
"""
""" # noqa: E501

BENTO_SERVICE_DOCKERFILE_SAGEMAKER_TEMPLATE = """\
FROM continuumio/miniconda3
@@ -125,7 +126,7 @@ def _parse_requirements(file_path):
RUN if [ -f /opt/program/setup.sh ]; then /bin/bash -c /opt/program/setup.sh; fi
ENV PATH="/opt/program:${PATH}"
"""
""" # noqa: E501


INIT_PY_TEMPLATE = """\
@@ -123,8 +123,8 @@ def save(self, dst):

@classmethod
def load(cls, path, artifacts_spec):
"""
bulk operation for loading all artifacts from path based on a list of ArtifactSpec
"""bulk operation for loading all artifacts from path based on a list of
ArtifactSpec
"""
load_path = os.path.join(path, ARTIFACTS_SUBPATH)
artifacts = cls()
@@ -122,7 +122,8 @@ def serve(port, archive_path=installed_archive_path):
server = BentoAPIServer(model_service, port=port)
server.start()

-# Example Usage: bentoml serve-gunicorn ./SAVED_ARCHIVE_PATH --port=PORT --workers=WORKERS
+# Example Usage:
+# bentoml serve-gunicorn ./SAVED_ARCHIVE_PATH --port=PORT --workers=WORKERS
@bentoml_cli.command()
@conditional_argument(
installed_archive_path is None, "archive-path", type=click.STRING
@@ -147,7 +148,7 @@ def serve_gunicorn(port, workers, archive_path=installed_archive_path):
def cli():
_cli = create_bentoml_cli()

-# Commands created here aren't mean to be used from generated service archive. They
+# Commands created here aren't mean to be used from generated service archive. They
# are used as part of BentoML cli commands only.

# pylint: disable=unused-variable
@@ -265,8 +266,8 @@ def check_deployment_status(archive_path, platform, region, stage, api_name):
deployment = SagemakerDeployment(archive_path, api_name, region)
else:
raise BentoMLException(
"check deployment status with --platform=%s" % platform
+ "is not supported in the current version of BentoML"
"check deployment status with --platform=%s is not supported in the "
"current version of BentoML" % platform
)

deployment.check_status()
@@ -101,7 +101,8 @@ def get_arn_role_from_current_user():
arn = role["Arn"]
if arn is None:
raise ValueError(
"Can't find proper Arn role for Sagemaker, please create one and try again"
"Can't find proper Arn role for Sagemaker, please create one and try "
"again"
)
return arn
elif type_role[0] == "role":
@@ -192,7 +193,8 @@ def __init__(
self.api = apis[0]
else:
raise BentoMLException(
"Please specify api-name, when more than one API is present in the archive"
"Please specify api-name, when more than one API is present in the "
"archive"
)
self.sagemaker_client = boto3.client("sagemaker", region_name=self.region)
self.model_name = generate_aws_compatible_string(
@@ -229,7 +231,7 @@ def deploy(self):
with open(os.path.join(snapshot_path, "serve"), "w") as f:
f.write(DEFAULT_SERVE_SCRIPT)

-# We want to give serve '755' permission. Since chmod take octal number, 755 => 493
+# permission 755 is required for entry script 'serve'
permission = "755"
octal_permission = int(permission, 8)
os.chmod(os.path.join(snapshot_path, "serve"), octal_permission)
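The permission comment in the hunk above relies on os.chmod taking an octal mode; a minimal illustration of the int(permission, 8) conversion used in the diff (the "serve" path here is only an example):

# int("755", 8) parses the string as octal, i.e. decimal 493, the same value as 0o755 (rwxr-xr-x).
import stat

assert int("755", 8) == 0o755 == 493
assert 0o755 == stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
# os.chmod("serve", 0o755) would be the equivalent direct call.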
@@ -316,8 +318,8 @@ def delete(self):
logger.info("AWS delete endpoint response: %s", delete_endpoint_response)
if delete_endpoint_response["ResponseMetadata"]["HTTPStatusCode"] == 200:
# We will also try to delete both model and endpoint configuration for user.
-# Since they are not critical, even they failed, we will still count delete deployment
-# a success
+# Since they are not critical, even they failed, we will still count delete
+# deployment a success action
delete_model_response = self.sagemaker_client.delete_model(
ModelName=self.model_name
)
@@ -327,7 +329,7 @@ def delete(self):
"Encounter error when deleting model: %s", delete_model_response
)

-delete_endpoint_config_response = self.sagemaker_client.delete_endpoint_config(
+delete_endpoint_config_response = self.sagemaker_client.delete_endpoint_config( # noqa: E501
EndpointConfigName=self.endpoint_config_name
)
logger.info(
@@ -130,4 +130,4 @@ def _serve():
if __name__ == '__main__':
_serve()
"""
""" # noqa: E501
@@ -66,9 +66,8 @@ def handle_request(self, request, func):
else:
return make_response(
jsonify(
message="Request content-type not supported, "
"only application/json and text/csv are "
"supported"
message="Request content-type not supported, only application/json "
"and text/csv are supported"
),
400,
)
@@ -103,16 +102,17 @@ def handle_cli(self, args, func):
df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
else:
raise ValueError(
"Input file format not supported, BentoML cli only accepts .json and .csv file"
"Input file format not supported, BentoML cli only accepts .json "
"and .csv file"
)
else:
# Assuming input string is JSON format
try:
df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
except ValueError as e:
raise ValueError(
"Unexpected input format, BentoML DataframeHandler expects json string as"
"input: {}".format(e)
"Unexpected input format, BentoML DataframeHandler expects json "
"string as input: {}".format(e)
)

if self.typ == "frame" and self.input_columns is not None:
@@ -133,8 +133,8 @@ def handle_aws_lambda_event(self, event, func):
else:
return {
"statusCode": 400,
"body": "Request content-type not supported, only application/json and text/csv"
" are supported",
"body": "Request content-type not supported, only application/json and "
"text/csv are supported",
}

if self.typ == "frame" and self.input_columns is not None:
@@ -121,9 +121,8 @@ def handle_aws_lambda_event(self, event, func):
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
else:
raise BentoMLException(
"BentoML currently doesn't support Content-Type: {content_type} for AWS Lambda".format(
content_type=event["headers"]["Content-Type"]
)
"BentoML currently doesn't support Content-Type: {content_type} for "
"AWS Lambda".format(content_type=event["headers"]["Content-Type"])
)

result = func(image)
@@ -49,13 +49,13 @@ def index_view_func(bento_service):
# TODO: Generate a html page for user and swagger definitions
endpoints = {
"/feedback": {
"description": "Predictions feedback endpoint. Expecting feedback request in JSON"
"format and must contain a `request_id` field, which can be obtained from "
"any BentoService API response header"
"description": "Predictions feedback endpoint. Expecting feedback request "
"in JSON format and must contain a `request_id` field, which can be "
"obtained from any BentoService API response header"
},
"/healthz": {
"description": "Health check endpoint. Expecting an empty response with status code "
"200 when the service is in health state"
"description": "Health check endpoint. Expecting an empty response with"
"status code 200 when the service is in health state"
},
"/metrics": {"description": "Prometheus metrics endpoint"},
}
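A hedged usage sketch of the endpoints documented in the dict above, against a locally running BentoAPIServer; the host and port are assumptions, not taken from the diff:

import requests

# /healthz is expected to return an empty 200 response while the service is healthy.
assert requests.get("http://127.0.0.1:5000/healthz").status_code == 200
# /metrics exposes Prometheus metrics as plain text.
print(requests.get("http://127.0.0.1:5000/metrics").text)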
@@ -39,7 +39,7 @@ def get_prediction_logger():

# prediction.log json fields - request / result / time
formatter = jsonlogger.JsonFormatter(
"(service_name) (service_version) (api_name) (request_id) (request) (response) (asctime)"
"(service_name) (service_version) (api_name) (request_id) (request) (response) (asctime)" # noqa: E501
)

prediction_logger = logging.getLogger(PREDICTION_LOGGER_NAME)
@@ -47,7 +47,7 @@ def upload_to_s3(s3_url, file_path):
for root, _, files in os.walk(file_path):
for file_name in files:
abs_file_path = os.path.join(root, file_name)
-relative_file_path = abs_file_path[(len(file_path) + 1):]
+relative_file_path = abs_file_path[(len(file_path) + 1) :]
s3_path = os.path.join(base_path, relative_file_path)
s3_client.upload_file(Filename=abs_file_path, Bucket=bucket, Key=s3_path)

@@ -65,7 +65,7 @@ def download_from_s3(s3_url, file_path):
result_content = list_object_result["Contents"]

for content in result_content:
relative_file_path = content["Key"][(len(base_path) + 1):]
relative_file_path = content["Key"][(len(base_path) + 1) :]
local_file_path = os.path.join(file_path, relative_file_path)
Path(os.path.dirname(local_file_path)).mkdir(parents=True, exist_ok=True)
s3_client.download_file(
@@ -1,14 +1,11 @@
-import os
-import sys
-
import pytest
import pandas as pd
import numpy as np

from bentoml.handlers.dataframe_handler import (
DataframeHandler,
check_dataframe_column_contains,
-) # noqa: E402
+)


def test_dataframe_handle_cli(capsys, tmpdir):
@@ -1,9 +1,6 @@
-import os
-import sys
-
-from bentoml import BentoService, api, artifacts # noqa: E402
-from bentoml.artifact import PickleArtifact # noqa: E402
-from bentoml.handlers import ImageHandler # noqa: E402
+from bentoml import BentoService, api, artifacts
+from bentoml.artifact import PickleArtifact
+from bentoml.handlers import ImageHandler


class TestImageModel(object):
@@ -1,8 +1,4 @@
-import os
-import sys
-
-from bentoml import BentoService, api, artifacts # noqa: E402
-from bentoml.handlers import JsonHandler # noqa: E402
+from bentoml.handlers import JsonHandler


def test_json_handle_cli(capsys, tmpdir):
@@ -11,8 +7,6 @@ def test_func(obj):

handler = JsonHandler()

-import json
-
json_file = tmpdir.join("test.json")
with open(str(json_file), "w") as f:
f.write('[{"name": "john","game": "mario","city": "sf"}]')
@@ -1,8 +1,6 @@
-import os
import json
-import sys

-from bentoml.server import BentoAPIServer # noqa: E402
+from bentoml.server import BentoAPIServer


def test_api_function_route(bento_service):