Commit

- detector name validation + samples
avarone committed May 5, 2021
1 parent 2444dfe commit f8324a8
Showing 6 changed files with 190 additions and 143 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -15,6 +15,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Fixed

## [2.3.0] - 2021-05-05

* Disabled mandatory validation for detectors' names to allow for [Writeprint detection](https://docs.expert.ai/nlapi/latest/guide/detectors/#writeprint-detector)

## [2.2.0] - 2021-03-15

* Added support for [information detection](https://docs.expert.ai/nlapi/latest/guide/detection)
108 changes: 54 additions & 54 deletions expertai/nlapi/cloud/validate.py
@@ -1,54 +1,54 @@
# Copyright (c) 2020 original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from expertai.nlapi.common import constants
from expertai.nlapi.common.errors import ParameterError


class ExpertAiValidation:
    """
    To be consistent, every new method added to verify a value should be
    named according to this pattern: [value_name]_value_is_correct
    """

    def language_value_is_correct(self, language):
        return language in constants.LANGUAGES.keys()

    def detector_value_is_correct(self, detector):
        return detector in constants.DETECTORS.keys()

    def resource_value_is_correct(self, resource):
        return resource in constants.RESOURCES_NAMES

    def context_value_is_correct(self, context):
        return True

    def taxonomy_value_is_correct(self, taxonomy):
        return True

    def check_name(self, param_name):
        if param_name not in constants.PARAMETER_NAMES:
            raise ParameterError("{} - invalid name".format(param_name))

    def check_value(self, param_name, value):
        method_name = "{}_value_is_correct".format(param_name)
        method = getattr(self, method_name)
        if not method(**{param_name: value}):
            raise ParameterError(
                "{} - invalid value: {}".format(param_name, value)
            )

    def check_parameters(self, params):
        for p_name, p_value in params.items():
            self.check_name(p_name)
            self.check_value(p_name, p_value)
# Copyright (c) 2020 original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from expertai.nlapi.common import constants
from expertai.nlapi.common.errors import ParameterError


class ExpertAiValidation:
    """
    To be consistent, every new method added to verify a value should be
    named according to this pattern: [value_name]_value_is_correct
    """

    def language_value_is_correct(self, language):
        return language in constants.LANGUAGES.keys()

    def detector_value_is_correct(self, detector):
        return True

    def resource_value_is_correct(self, resource):
        return resource in constants.RESOURCES_NAMES

    def context_value_is_correct(self, context):
        return True

    def taxonomy_value_is_correct(self, taxonomy):
        return True

    def check_name(self, param_name):
        if param_name not in constants.PARAMETER_NAMES:
            raise ParameterError("{} - invalid name".format(param_name))

    def check_value(self, param_name, value):
        method_name = "{}_value_is_correct".format(param_name)
        method = getattr(self, method_name)
        if not method(**{param_name: value}):
            raise ParameterError(
                "{} - invalid value: {}".format(param_name, value)
            )

    def check_parameters(self, params):
        for p_name, p_value in params.items():
            self.check_name(p_name)
            self.check_value(p_name, p_value)
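
A minimal usage sketch (not part of this commit; it only illustrates the relaxed validation above, assuming the package is importable as shown in the file paths):

from expertai.nlapi.cloud.validate import ExpertAiValidation
from expertai.nlapi.common.errors import ParameterError

validation = ExpertAiValidation()

# Any detector name now passes client-side validation, so detectors such as
# "writeprint" are no longer rejected before the request reaches the API.
validation.check_parameters({"detector": "writeprint", "language": "en"})

# Unknown parameter names and unsupported values are still rejected.
try:
    validation.check_parameters({"language": "xx"})
except ParameterError as error:
    print(error)  # e.g. "language - invalid value: xx"
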
175 changes: 86 additions & 89 deletions expertai/nlapi/common/constants.py
@@ -1,89 +1,86 @@
# Copyright (c) 2020 original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

BASE_API_URL = "https://nlapi.expert.ai/v2"
BASE_EDGE_URL = "https://edgeapi.expert.ai"

OAUTH2_TOKEN_URL = "https://developer.expert.ai/oauth2/token"

USERNAME_ENV_VARIABLE = "EAI_USERNAME"
PASSWORD_ENV_VARIABLE = "EAI_PASSWORD"
TOKEN_ENV_VARIABLE = "AUTH_TOKEN"
AUTH_HEADER_KEY = "Authorization"
AUTH_HEADER_VALUE = "Bearer {}"

TK_TIMESTAMP_FILENAME = ".timestamp"

# No leading slash
FULL_ANALYSIS_PATH = "analyze/{context}/{language}"
ANALYSIS_PATH = "analyze/{context}/{language}/{resource}"
CLASSIFICATION_PATH = "categorize/{taxonomy}/{language}"
EXECUTION_KEY_PATH = "edge/key/{footprint}"
DETECTION_PATH = "detect/{detector}/{language}"

CONTEXTS_PATH = "contexts"
DETECTORS_PATH = "detectors"
TAXONOMIES_PATH = "taxonomies"
TAXONOMY_PATH = "taxonomies/{taxonomy}/{language}"

CONTENT_TYPE_HEADER = {"Content-Type": "application/json"}

URLS_AND_METHODS = (
    (FULL_ANALYSIS_PATH, "POST"),
    (ANALYSIS_PATH, "POST"),
    (CLASSIFICATION_PATH, "POST"),
    (CONTEXTS_PATH, "GET"),
    (TAXONOMIES_PATH, "GET"),
    (TAXONOMY_PATH, "GET"),
    (EXECUTION_KEY_PATH, "GET"),
    (DETECTION_PATH, "POST"),
    (DETECTORS_PATH, "GET"),
)

HTTP_GET = "GET"
HTTP_SUCCESSFUL = 200
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_INTERNAL_SERVER_ERROR = 500

# Strings used when printing out the status of an EaiResponse
HTTP_ERRORS = {
    HTTP_UNAUTHORIZED: "UNAUTHORIZED",
    HTTP_FORBIDDEN: "FORBIDDEN",
    HTTP_NOT_FOUND: "NOT FOUND",
    HTTP_INTERNAL_SERVER_ERROR: "INTERNAL SERVER ERROR",
}

UNKNOWN = "UNKNOWN_STATUS"
SUCCESSFUL = "SUCCESSFUL"
BAD_REQUEST = "BAD REQUEST"

PARAMETER_NAMES = ["language", "resource", "context", "taxonomy", "footprint", "detector"]

LANGUAGES = {
    "de": "German",
    "en": "English",
    "es": "Spanish",
    "fr": "French",
    "it": "Italian",
}
DETECTORS = {
    "pii": "Protected Information"
}

RESOURCES_NAMES = ["disambiguation", "relevants", "entities", "relations", "sentiment"]

RESPONSE_KEYS_TO_IGNORE = ["language", "version", "content"]
# Copyright (c) 2020 original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

BASE_API_URL = "https://nlapi.expert.ai/v2"
BASE_EDGE_URL = "https://edgeapi.expert.ai"

OAUTH2_TOKEN_URL = "https://developer.expert.ai/oauth2/token"

USERNAME_ENV_VARIABLE = "EAI_USERNAME"
PASSWORD_ENV_VARIABLE = "EAI_PASSWORD"
TOKEN_ENV_VARIABLE = "AUTH_TOKEN"
AUTH_HEADER_KEY = "Authorization"
AUTH_HEADER_VALUE = "Bearer {}"

TK_TIMESTAMP_FILENAME = ".timestamp"

# No leading slash
FULL_ANALYSIS_PATH = "analyze/{context}/{language}"
ANALYSIS_PATH = "analyze/{context}/{language}/{resource}"
CLASSIFICATION_PATH = "categorize/{taxonomy}/{language}"
EXECUTION_KEY_PATH = "edge/key/{footprint}"
DETECTION_PATH = "detect/{detector}/{language}"

CONTEXTS_PATH = "contexts"
DETECTORS_PATH = "detectors"
TAXONOMIES_PATH = "taxonomies"
TAXONOMY_PATH = "taxonomies/{taxonomy}/{language}"

CONTENT_TYPE_HEADER = {"Content-Type": "application/json"}

URLS_AND_METHODS = (
    (FULL_ANALYSIS_PATH, "POST"),
    (ANALYSIS_PATH, "POST"),
    (CLASSIFICATION_PATH, "POST"),
    (CONTEXTS_PATH, "GET"),
    (TAXONOMIES_PATH, "GET"),
    (TAXONOMY_PATH, "GET"),
    (EXECUTION_KEY_PATH, "GET"),
    (DETECTION_PATH, "POST"),
    (DETECTORS_PATH, "GET"),
)

HTTP_GET = "GET"
HTTP_SUCCESSFUL = 200
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_INTERNAL_SERVER_ERROR = 500

# Strings used when printing out the status of an EaiResponse
HTTP_ERRORS = {
    HTTP_UNAUTHORIZED: "UNAUTHORIZED",
    HTTP_FORBIDDEN: "FORBIDDEN",
    HTTP_NOT_FOUND: "NOT FOUND",
    HTTP_INTERNAL_SERVER_ERROR: "INTERNAL SERVER ERROR",
}

UNKNOWN = "UNKNOWN_STATUS"
SUCCESSFUL = "SUCCESSFUL"
BAD_REQUEST = "BAD REQUEST"

PARAMETER_NAMES = ["language", "resource", "context", "taxonomy", "footprint", "detector"]

LANGUAGES = {
    "de": "German",
    "en": "English",
    "es": "Spanish",
    "fr": "French",
    "it": "Italian",
}

RESOURCES_NAMES = ["disambiguation", "relevants", "entities", "relations", "sentiment"]

RESPONSE_KEYS_TO_IGNORE = ["language", "version", "content"]
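
A minimal sketch (not part of this commit) of how the endpoint constants above compose into a request URL; joining with a single slash is an assumption based on the "No leading slash" comment, not a statement of how the client builds URLs internally:

from expertai.nlapi.common import constants

# Sketch only: assumes the path template is appended to BASE_API_URL with "/".
detection_url = "{}/{}".format(
    constants.BASE_API_URL,
    constants.DETECTION_PATH.format(detector="writeprint", language="en"),
)
print(detection_url)  # https://nlapi.expert.ai/v2/detect/writeprint/en
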
@@ -0,0 +1,15 @@
# Demonstrates the IPTC Media Topics document classification capability of the (Cloud based) expert.ai Natural Language API

from expertai.nlapi.cloud.client import ExpertAiClient
client = ExpertAiClient()

text = "Michael Jordan was one of the best basketball players of all time. Scoring was Jordan's stand-out skill, but he still holds a defensive NBA record, with eight steals in a half."
taxonomy = 'iptc'
language = 'en'

output = client.classification(body={"document": {"text": text}}, params={'taxonomy': taxonomy, 'language': language})

print("Tab separated list of categories:")

for category in output.categories:
    print(category.id_, category.hierarchy, sep="\t")
@@ -0,0 +1,15 @@
# Demonstrates the emotional traits document classification capability of the (Cloud based) expert.ai Natural Language API

from expertai.nlapi.cloud.client import ExpertAiClient
client = ExpertAiClient()

text = "I experience a mix of conflicting emotions: the approach of the fateful date scares me, but at the same time I can't wait for it to arrive. I have moments of elation and others of pure panic, but I would say that I am mostly happy."
taxonomy = 'emotional-traits'
language = 'en'

output = client.classification(body={"document": {"text": text}}, params={'taxonomy': taxonomy, 'language': language})

print("Tab separated list of categories:")

for category in output.categories:
    print(category.id_, category.hierarchy, sep="\t")
@@ -0,0 +1,16 @@
# Demonstrates the Writeprint detection capability of the expert.ai (Cloud based) Natural Language API

import json
from expertai.nlapi.cloud.client import ExpertAiClient
client = ExpertAiClient()

text = "My dear Bagginses and Boffins, Tooks and Brandybucks, Grubbs, Chubbs, Hornblowers, Bolgers, Bracegirdles and Proudfoots. Today is my one hundred and eleventh birthday! Alas, eleventy-one years is far too short a time to live among such excellent and admirable hobbits. I don't know half of you half as well as I should like, and I like less than half of you half as well as you deserve."

detector = 'writeprint'
language = 'en'

output = client.detection(body={"document": {"text": text}}, params={'detector': detector, 'language': language})

# Output extra data containing the JSON-LD object

print("extra_data: ",json.dumps(output.extra_data, indent=4, sort_keys=True))
