diff --git a/bothub/api/v2/routers.py b/bothub/api/v2/routers.py
index 3c53b33fe..bf1b756e3 100644
--- a/bothub/api/v2/routers.py
+++ b/bothub/api/v2/routers.py
@@ -18,13 +18,13 @@
 from .nlp.views import RepositoryAuthorizationEvaluateViewSet
 from .nlp.views import RepositoryAuthorizationInfoViewSet
 from .nlp.views import RepositoryAuthorizationParseViewSet
-from .nlp.views import RepositoryAuthorizationTrainLanguagesViewSet
 from .nlp.views import (
     RepositoryAuthorizationTrainViewSet,
     RepositoryNLPLogsViewSet,
     RepositoryAuthorizationKnowledgeBaseViewSet,
     RepositoryAuthorizationExamplesViewSet,
     RepositoryAuthorizationAutomaticEvaluateViewSet,
+    RepositoryAuthorizationTrainLanguagesViewSet,
 )
 from .nlp.views import RepositoryUpdateInterpretersViewSet
 from .organization.views import (
diff --git a/bothub/authentication/migrations/0009_auto_20210506_1453.py b/bothub/authentication/migrations/0009_auto_20210506_1453.py
index 9c79bd5e1..87bd9fbf6 100644
--- a/bothub/authentication/migrations/0009_auto_20210506_1453.py
+++ b/bothub/authentication/migrations/0009_auto_20210506_1453.py
@@ -5,14 +5,17 @@
 
 class Migration(migrations.Migration):
 
-    dependencies = [
-        ('authentication', '0008_user_language'),
-    ]
+    dependencies = [("authentication", "0008_user_language")]
 
     operations = [
         migrations.AlterField(
-            model_name='user',
-            name='language',
-            field=models.CharField(blank=True, choices=[('en-us', 'English'), ('pt-br', 'Brazilian Portuguese')], max_length=5, null=True),
-        ),
+            model_name="user",
+            name="language",
+            field=models.CharField(
+                blank=True,
+                choices=[("en-us", "English"), ("pt-br", "Brazilian Portuguese")],
+                max_length=5,
+                null=True,
+            ),
+        )
     ]
diff --git a/bothub/common/models.py b/bothub/common/models.py
index 5d9422451..8d36fad19 100644
--- a/bothub/common/models.py
+++ b/bothub/common/models.py
@@ -470,10 +470,7 @@ def request_nlp_analyze(self, user_authorization, data):
 
     def request_nlp_debug_parse(self, user_authorization, data):
         try:  # pragma: no cover
-            payload = {
-                "text": data.get("text"),
-                "language": data.get("language"),
-            }
+            payload = {"text": data.get("text"), "language": data.get("language")}
 
             repository_version = data.get("repository_version")
 
@@ -483,9 +480,7 @@ def request_nlp_debug_parse(self, user_authorization, data):
             r = requests.post(  # pragma: no cover
                 "{}v2/debug_parse/".format(self.nlp_base_url),
                 json=payload,
-                headers={
-                    "Authorization": "Bearer {}".format(user_authorization.uuid)
-                }
+                headers={"Authorization": "Bearer {}".format(user_authorization.uuid)},
             )
 
             return r  # pragma: no cover
@@ -497,9 +492,7 @@ def request_nlp_debug_parse(self, user_authorization, data):
 
     def request_nlp_words_distribution(self, user_authorization, data):
         try:  # pragma: no cover
-            payload = {
-                "language": data.get("language"),
-            }
+            payload = {"language": data.get("language")}
 
             repository_version = data.get("repository_version")
 
@@ -509,9 +502,7 @@ def request_nlp_words_distribution(self, user_authorization, data):
             r = requests.post(  # pragma: no cover
                 "{}v2/words_distribution/".format(self.nlp_base_url),
                 json=payload,
-                headers={
-                    "Authorization": "Bearer {}".format(user_authorization.uuid)
-                },
+                headers={"Authorization": "Bearer {}".format(user_authorization.uuid)},
             )
 
             return r  # pragma: no cover
@@ -533,9 +524,7 @@ def request_nlp_manual_evaluate(self, user_authorization, data):
             r = requests.post(
                 "{}v2/evaluate/".format(self.nlp_base_url),
                 json=payload,
-                headers={
-                    "Authorization": f"Bearer {user_authorization.uuid}"
-                },
+                headers={"Authorization": f"Bearer {user_authorization.uuid}"},
             )
 
             return r  # pragma: no cover
@@ -557,9 +546,7 @@ def request_nlp_automatic_evaluate(self, user_authorization, data):
             r = requests.post(
                 "{}v2/evaluate/".format(self.nlp_base_url),
                 json=payload,
-                headers={
-                    "Authorization": f"Bearer {user_authorization.uuid}"
-                },
+                headers={"Authorization": f"Bearer {user_authorization.uuid}"},
             )
 
             return r  # pragma: no cover