diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 0d9f61e1c1a4..5cb55d7e1faf 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -155,6 +155,7 @@ "sdk/ai/azure-ai-voicelive/samples/**" ], "words": [ + "qnas", "msedge", "spinup", "cibuildwheel", @@ -530,7 +531,7 @@ "logprobs", "pyaudio", "PyAudio", - "libasound", + "libasound", "ingraph" ], "overrides": [ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/CHANGELOG.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/CHANGELOG.md new file mode 100644 index 000000000000..46e84408c8ac --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/CHANGELOG.md @@ -0,0 +1,10 @@ +# Release History + +## 1.0.0b1 + +### Features Added +- Initial preview release of `azure-ai-language-questionanswering-authoring` separated from the combined `azure-ai-language-questionanswering` package. +- Supports project listing, creation, update, deletion, import/export, deployments, synonym/source/QnA management operations aligned with the TypeSpec service definition (includes preview API version 2025-05-15-preview where applicable). + +### Other Changes +- Generated from TypeSpec definitions in `specification\cognitiveservices\data-plane\LanguageQuestionAnsweringAuthoring`. \ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/LICENSE b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. 
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. 
\ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/MANIFEST.in b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/MANIFEST.in new file mode 100644 index 000000000000..15961389c84e --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/MANIFEST.in @@ -0,0 +1,9 @@ +include *.md +include LICENSE +include azure/ai/language/questionanswering/authoring/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/language/__init__.py +include azure/ai/language/questionanswering/__init__.py diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/README.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/README.md new file mode 100644 index 000000000000..6be80abab620 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/README.md @@ -0,0 +1,213 @@ +# Azure AI Language Question Answering Authoring client library for Python + +The `azure-ai-language-questionanswering-authoring` package provides **authoring / management capabilities** for Azure AI Language Question Answering: create and configure projects, add knowledge sources, manage QnA pairs and synonyms, and deploy versions. Runtime (query) operations live in the separate `azure-ai-language-questionanswering` package. + +> NOTE: This is a preview (`1.0.0b1`) targeting a preview service API version (`2025-05-15-preview`). APIs, models, and LRO result payloads may change before GA. 
+ +[Product documentation][product_docs] + +## Getting started + +### Prerequisites + +- Python 3.9+ (preview requires 3.9 or later) +- An Azure subscription +- An Azure AI Language resource with Question Answering enabled (custom subdomain endpoint recommended for AAD) + +### Install the package + +```bash +pip install --pre azure-ai-language-questionanswering-authoring +``` + +Optional (for Azure Active Directory auth): + +```bash +pip install azure-identity +``` + +### Authenticate the client + +You can authenticate with: + +1. Azure Active Directory via `DefaultAzureCredential` (recommended) +2. A resource key via `AzureKeyCredential` (quick start / local experimentation) + +AAD example: +```python +from azure.identity import DefaultAzureCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient + +endpoint = "https://.cognitiveservices.azure.com" +credential = DefaultAzureCredential() +client = QuestionAnsweringAuthoringClient(endpoint, credential) +``` + +Key credential example: +```python +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient + +client = QuestionAnsweringAuthoringClient( + endpoint="https://.cognitiveservices.azure.com", + credential=AzureKeyCredential("") +) +``` + +## Key concepts + +- **Project**: A logical container for your knowledge sources, QnA pairs, synonyms, and deployments. +- **Knowledge Source**: A URL/file describing content from which QnA pairs can be extracted. +- **QnA Record**: A question and its answer plus metadata/alternative questions. +- **Synonyms**: Word alteration groups to normalize variations in user questions. +- **Deployment**: A named (e.g., `production`) deployed snapshot of your project used by runtime clients. +- **Long‑running operation (LRO)**: Certain operations (update sources/QnAs, import, export, deploy) return an `LROPoller`. 
In the current preview these resolve to `None`—treat `.result()` strictly as a completion signal. + +## Examples + +Below are minimal synchronous examples. More complete samples (including async equivalents) are in the samples directory. Environment variables used by samples: `AZURE_QUESTIONANSWERING_ENDPOINT`, `AZURE_QUESTIONANSWERING_KEY`. + +### Create a project +```python +metadata = { + "language": "en", + "description": "FAQ project", + "settings": {"defaultAnswer": "no answer"}, + "multilingualResource": True, +} +client.create_project(project_name="FAQ", body=metadata) +``` + +### List projects +```python +for proj in client.list_projects(): + print(proj.get("projectName"), proj.get("lastModifiedDateTime")) +``` + +### Add / update a knowledge source +```python +from azure.ai.language.questionanswering.authoring.models import UpdateSourceRecord,UpdateQnaSourceRecord + +poller = client.begin_update_sources( + project_name="FAQ", + body=[ + UpdateSourceRecord( + op="add", + value=UpdateQnaSourceRecord( + display_name="ContosoFAQ", + source="https://contoso.com/faq", + source_uri="https://contoso.com/faq", + source_kind="url", + content_structure_kind="unstructured", + refresh=False, + ), + ) + ], +) +poller.result() +``` + +### Add a QnA pair +```python +from azure.ai.language.questionanswering.authoring.models import UpdateQnaRecord,QnaRecord + +poller = client.begin_update_qnas( + project_name="FAQ", + body=[ + UpdateQnaRecord( + op="add", + value=QnaRecord( + id=1, + answer="Use the Azure SDKs.", + source="manual", + questions=["How do I use Azure services in .NET?"], + ), + ) + ], +) +poller.result() +``` + +### Set synonyms +```python +from azure.ai.language.questionanswering.authoring.models import SynonymAssets,WordAlterations + +client.update_synonyms( + project_name="FAQ", + body=SynonymAssets( + value=[ + WordAlterations(alterations=["qnamaker", "qna maker"]), + WordAlterations(alterations=["qna", "question and answer"]), + ] + ), +) +``` + +### 
Deploy +```python +client.begin_deploy_project(project_name="FAQ", deployment_name="production").result() +``` + +### Export / Import +```python +export_poller = client.begin_export(project_name="FAQ", format="json") +export_poller.result() # current preview returns None + +from azure.ai.language.questionanswering.authoring.models import ImportJobOptions,Assets,ImportQnaRecord +assets = ImportJobOptions( + assets=Assets( + qnas=[ + ImportQnaRecord( + id=1, + answer="Example answer", + source="https://contoso.com/faq", + questions=["Example question?"], + ) + ] + ) +) +client.begin_import_assets(project_name="FAQ", body=assets, format="json").result() +``` + +## Troubleshooting + +### Errors +Service errors raise `HttpResponseError` (or subclasses) from `azure-core`. Check the `.status_code` / `.message` for details. + +```python +from azure.core.exceptions import HttpResponseError + +try: + client.list_projects() +except HttpResponseError as e: + print("Request failed:", e.message) +``` + +### Logging +Enable basic logging: +```python +import logging +logging.basicConfig(level=logging.INFO) +``` +For request/response details set environment variable `AZURE_LOG_LEVEL=info` or pass `logging_enable=True` per operation. + +## Next steps + +- Explore the full samples +- Learn about Question Answering concepts in [product documentation][product_docs] + +## Contributing + +See [CONTRIBUTING.md][contributing] for instructions on building, testing, and contributing. + +This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit . + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. 
You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct][code_of_conduct]. For more information see the [Code of Conduct FAQ][coc_faq] or contact with any additional questions or comments. + + +[product_docs]: https://learn.microsoft.com/azure/ai-services/language-service/question-answering/overview +[contributing]: https://github.com/Azure/azure-sdk-for-python/blob/main/CONTRIBUTING.md +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ +[coc_faq]: https://opensource.microsoft.com/codeofconduct/faq/ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/_metadata.json b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/_metadata.json new file mode 100644 index 000000000000..253921f335be --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "2025-05-15-preview" +} \ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/apiview-properties.json b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/apiview-properties.json new file mode 100644 index 000000000000..697471d4bea1 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/apiview-properties.json @@ -0,0 +1,79 @@ +{ + "CrossLanguagePackageId": "Language.QuestionAnswering.Authoring", + "CrossLanguageDefinitionId": { + "azure.ai.language.questionanswering.authoring.models.ActiveLearningFeedback": "Language.QuestionAnswering.Authoring.ActiveLearningFeedback", + "azure.ai.language.questionanswering.authoring.models.Assets": "Language.QuestionAnswering.Authoring.Assets", + "azure.ai.language.questionanswering.authoring.models.Error": "Language.QuestionAnswering.Authoring.Error", + "azure.ai.language.questionanswering.authoring.models.ErrorResponse": 
"Language.QuestionAnswering.Authoring.ErrorResponse", + "azure.ai.language.questionanswering.authoring.models.FeedbackRecord": "Language.QuestionAnswering.Authoring.FeedbackRecord", + "azure.ai.language.questionanswering.authoring.models.ImportFiles": "Language.QuestionAnswering.Authoring.ImportFiles", + "azure.ai.language.questionanswering.authoring.models.ImportJobOptions": "Language.QuestionAnswering.Authoring.ImportJobOptions", + "azure.ai.language.questionanswering.authoring.models.ImportQnaRecord": "Language.QuestionAnswering.Authoring.ImportQnaRecord", + "azure.ai.language.questionanswering.authoring.models.InnerErrorModel": "Language.QuestionAnswering.Authoring.InnerErrorModel", + "azure.ai.language.questionanswering.authoring.models.ProjectDeployment": "Language.QuestionAnswering.Authoring.ProjectDeployment", + "azure.ai.language.questionanswering.authoring.models.ProjectSettings": "Language.QuestionAnswering.Authoring.ProjectSettings", + "azure.ai.language.questionanswering.authoring.models.QnaDialog": "Language.QuestionAnswering.Authoring.QnaDialog", + "azure.ai.language.questionanswering.authoring.models.QnaPrompt": "Language.QuestionAnswering.Authoring.QnaPrompt", + "azure.ai.language.questionanswering.authoring.models.QnaRecord": "Language.QuestionAnswering.Authoring.QnaRecord", + "azure.ai.language.questionanswering.authoring.models.QnaSourceRecord": "Language.QuestionAnswering.Authoring.QnaSourceRecord", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringExportJobState": "Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringExportJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringImportJobState": "Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringImportJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeletionJobState": 
"Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringProjectDeletionJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeploymentJobState": "Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringProjectDeploymentJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateQnasJobState": "Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringUpdateQnasJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateSourcesJobState": "Language.QuestionAnswering.Authoring.QuestionAnsweringAuthoringUpdateSourcesJobState", + "azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject": "Language.QuestionAnswering.Authoring.QuestionAnsweringProjectMetadata", + "azure.ai.language.questionanswering.authoring.models.RetrieveQnaRecord": "Language.QuestionAnswering.Authoring.RetrieveQnaRecord", + "azure.ai.language.questionanswering.authoring.models.SuggestedQuestion": "Language.QuestionAnswering.Authoring.SuggestedQuestion", + "azure.ai.language.questionanswering.authoring.models.SuggestedQuestionsCluster": "Language.QuestionAnswering.Authoring.SuggestedQuestionsCluster", + "azure.ai.language.questionanswering.authoring.models.SynonymAssets": "Language.QuestionAnswering.Authoring.SynonymAssets", + "azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord": "Language.QuestionAnswering.Authoring.UpdateQnaRecord", + "azure.ai.language.questionanswering.authoring.models.UpdateQnaSourceRecord": "Language.QuestionAnswering.Authoring.UpdateQnaSourceRecord", + "azure.ai.language.questionanswering.authoring.models.UpdateSourceFiles": "Language.QuestionAnswering.Authoring.UpdateSourceFiles", + "azure.ai.language.questionanswering.authoring.models.UpdateSourceFromFileOperationRecord": "Language.QuestionAnswering.Authoring.UpdateSourceFromFileOperationRecord", + 
"azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord": "Language.QuestionAnswering.Authoring.UpdateSourceRecord", + "azure.ai.language.questionanswering.authoring.models.WordAlterations": "Language.QuestionAnswering.Authoring.WordAlterations", + "azure.ai.language.questionanswering.authoring.models.ErrorCode": "Language.QuestionAnswering.Authoring.ErrorCode", + "azure.ai.language.questionanswering.authoring.models.InnerErrorCode": "Language.QuestionAnswering.Authoring.InnerErrorCode", + "azure.ai.language.questionanswering.authoring.models.JobStatus": "Language.QuestionAnswering.Authoring.jobStatus", + "azure.ai.language.questionanswering.authoring.models.Format": "Language.QuestionAnswering.Authoring.format", + "azure.ai.language.questionanswering.authoring.models.AssetKind": "Language.QuestionAnswering.Authoring.AssestKind", + "azure.ai.language.questionanswering.authoring.models.SourceKind": "Language.QuestionAnswering.Authoring.sourceKind", + "azure.ai.language.questionanswering.authoring.models.SourceContentStructureKind": "Language.QuestionAnswering.Authoring.sourceContentStructureKind", + "azure.ai.language.questionanswering.authoring.models.UpdateOperationKind": "Language.QuestionAnswering.Authoring.updateOperationKind", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.list_projects": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.listProjects", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.list_projects": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.listProjects", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.get_project_details": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getProjectDetails", + 
"azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.get_project_details": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getProjectDetails", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.create_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.createProject", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.create_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.createProject", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_delete_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.deleteProject", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_delete_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.deleteProject", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_export": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.export", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_export": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.export", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_import_assets": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.import", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_import_assets": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.import", + 
"azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_import_from_files": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.importFromFiles", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_import_from_files": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.importFromFiles", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_deploy_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.deployProject", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_deploy_project": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.deployProject", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.list_deployments": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.listDeployments", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.list_deployments": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.listDeployments", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.list_synonyms": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getSynonyms", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.list_synonyms": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getSynonyms", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.update_synonyms": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSynonyms", + 
"azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.update_synonyms": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSynonyms", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.list_sources": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getSources", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.list_sources": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getSources", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_update_sources": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSources", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_update_sources": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSources", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_update_sources_from_files": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSourcesFromFiles", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_update_sources_from_files": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateSourcesFromFiles", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.list_qnas": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getQnas", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.list_qnas": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.getQnas", + 
"azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.begin_update_qnas": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateQnas", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.begin_update_qnas": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.updateQnas", + "azure.ai.language.questionanswering.authoring.QuestionAnsweringAuthoringClient.add_feedback": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.addFeedback", + "azure.ai.language.questionanswering.authoring.aio.QuestionAnsweringAuthoringClient.add_feedback": "Language.QuestionAnsweringAuthoringClientCustomizations.QuestionAnsweringAuthoringClientOperations.addFeedback" + } +} \ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/assets.json b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/assets.json new file mode 100644 index 000000000000..ebe4f9908e7d --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/assets.json @@ -0,0 +1,6 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "python", + "TagPrefix": "python/cognitivelanguage/azure-ai-language-questionanswering-authoring", + "Tag": "python/cognitivelanguage/azure-ai-language-questionanswering-authoring_401205f3a4" +} diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git 
a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/__init__.py new file mode 100644 index 000000000000..53d2d76553e0 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import QuestionAnsweringAuthoringClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "QuestionAnsweringAuthoringClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_client.py new file mode 100644 index 000000000000..ab0557ff6774 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_client.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import Any, TYPE_CHECKING, Union
from typing_extensions import Self

from azure.core import PipelineClient
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline import policies
from azure.core.rest import HttpRequest, HttpResponse

from ._configuration import QuestionAnsweringAuthoringClientConfiguration
from ._operations import _QuestionAnsweringAuthoringClientOperationsMixin
from ._utils.serialization import Deserializer, Serializer

if TYPE_CHECKING:
    from azure.core.credentials import TokenCredential


class QuestionAnsweringAuthoringClient(_QuestionAnsweringAuthoringClientOperationsMixin):
    """QuestionAnsweringAuthoringClient.

    :param endpoint: Supported Cognitive Services endpoint (e.g.,
     https://<resource-name>.api.cognitiveservices.azure.com). Required.
    :type endpoint: str
    :param credential: Credential used to authenticate requests to the service. Is either a key
     credential type or a token credential type. Required.
    :type credential: ~azure.core.credentials.AzureKeyCredential or
     ~azure.core.credentials.TokenCredential
    :keyword api_version: The API version to use for this operation. Default value is
     "2025-05-15-preview". Note that overriding this default value may result in unsupported
     behavior.
    :paramtype api_version: str
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    """

    def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None:
        # Base URL template: the "{Endpoint}" placeholder is substituted with the
        # configured endpoint when each request URL is formatted (see send_request).
        _endpoint = "{Endpoint}/language"
        self._config = QuestionAnsweringAuthoringClientConfiguration(endpoint=endpoint, credential=credential, **kwargs)

        # Callers may supply a fully custom pipeline via the "policies" keyword;
        # otherwise the standard azure-core policy chain is assembled in its
        # conventional order (per-request policies before retry, tracing after).
        _policies = kwargs.pop("policies", None)
        if _policies is None:
            _policies = [
                policies.RequestIdPolicy(**kwargs),
                self._config.headers_policy,
                self._config.user_agent_policy,
                self._config.proxy_policy,
                policies.ContentDecodePolicy(**kwargs),
                self._config.redirect_policy,
                self._config.retry_policy,
                self._config.authentication_policy,
                self._config.custom_hook_policy,
                self._config.logging_policy,
                policies.DistributedTracingPolicy(**kwargs),
                # Only needed when redirects are enabled: strips sensitive headers
                # before a request is replayed against a redirect target.
                policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
                self._config.http_logging_policy,
            ]
        self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs)

        self._serialize = Serializer()
        self._deserialize = Deserializer()
        self._serialize.client_side_validation = False

    def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")

        >>> response = client.send_request(request)


        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Operate on a copy so the caller's request object is not mutated when the
        # "{Endpoint}" placeholder in its URL is expanded below.
        request_copy = deepcopy(request)
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, stream=stream, **kwargs)  # type: ignore

    def close(self) -> None:
        self._client.close()

    def __enter__(self) -> Self:
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details: Any) -> None:
        self._client.__exit__(*exc_details)


# --- next file in this diff: azure/ai/language/questionanswering/authoring/_configuration.py ---
# pylint: disable=too-many-lines,duplicate-code
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any, TYPE_CHECKING, Union

from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline import policies

from ._version import VERSION

if TYPE_CHECKING:
    from azure.core.credentials import TokenCredential


class QuestionAnsweringAuthoringClientConfiguration:  # pylint: disable=too-many-instance-attributes,name-too-long
    """Configuration for QuestionAnsweringAuthoringClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param endpoint: Supported Cognitive Services endpoint (e.g.,
     https://<resource-name>.api.cognitiveservices.azure.com). Required.
    :type endpoint: str
    :param credential: Credential used to authenticate requests to the service. Is either a key
     credential type or a token credential type. Required.
    :type credential: ~azure.core.credentials.AzureKeyCredential or
     ~azure.core.credentials.TokenCredential
    :keyword api_version: The API version to use for this operation. Default value is
     "2025-05-15-preview". Note that overriding this default value may result in unsupported
     behavior.
    :paramtype api_version: str
    """

    def __init__(self, endpoint: str, credential: Union[AzureKeyCredential, "TokenCredential"], **kwargs: Any) -> None:
        api_version: str = kwargs.pop("api_version", "2025-05-15-preview")

        if endpoint is None:
            raise ValueError("Parameter 'endpoint' must not be None.")
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")

        self.endpoint = endpoint
        self.credential = credential
        self.api_version = api_version
        # Default AAD scope for Cognitive Services; overridable for sovereign clouds.
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://cognitiveservices.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "ai-language-questionanswering-authoring/{}".format(VERSION))
        self.polling_interval = kwargs.get("polling_interval", 30)
        self._configure(**kwargs)

    def _infer_policy(self, **kwargs):
        # Select an authentication policy from the credential's shape: an
        # AzureKeyCredential is sent via the "Ocp-Apim-Subscription-Key" header,
        # while anything exposing get_token() is treated as an AAD token credential.
        if isinstance(self.credential, AzureKeyCredential):
            return policies.AzureKeyCredentialPolicy(self.credential, "Ocp-Apim-Subscription-Key", **kwargs)
        if hasattr(self.credential, "get_token"):
            return policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
        raise TypeError(f"Unsupported credential: {self.credential}")

    def _configure(self, **kwargs: Any) -> None:
        # Every pipeline policy can be replaced individually through the matching
        # keyword argument; unspecified ones fall back to the azure-core defaults.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        if self.credential and not self.authentication_policy:
            self.authentication_policy = self._infer_policy(**kwargs)


# --- next file in this diff: azure/ai/language/questionanswering/authoring/_operations/__init__.py ---
# pylint: disable=line-too-long,useless-suppression
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=wrong-import-position

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Re-export patch symbols for type checkers only; at runtime they are pulled
    # in below, after the generated operations import.
    from ._patch import *  # pylint: disable=unused-wildcard-import

from ._operations import _QuestionAnsweringAuthoringClientOperationsMixin  # type: ignore # pylint: disable=unused-import

# "_patch" is the customization hook: hand-written additions layered on top of
# the generated code survive regeneration. Import order is load-bearing here.
from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk

# Public API is whatever _patch chooses to export; the generated mixin itself
# stays private.
__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__])  # pyright: ignore
_patch_sdk()


# --- next file in this diff: azure/ai/language/questionanswering/authoring/_operations/_operations.py ---
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. import models as _models +from .._configuration import QuestionAnsweringAuthoringClientConfiguration +from .._utils.model_base import Model as _Model, SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC, prepare_multipart_form_data +from .._validation import api_version_validation + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_question_answering_authoring_list_projects_request( # pylint: disable=name-too-long + *, top: Optional[int] = None, skip: Optional[int] = None, maxpagesize: Optional[int] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept 
= _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_get_project_details_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_create_project_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = 
kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_delete_project_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_question_answering_authoring_get_delete_status_request( # pylint: disable=name-too-long + job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/deletion-jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_export_request( # pylint: disable=name-too-long + project_name: str, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/:export" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if file_format is not None: + _params["format"] = _SERIALIZER.query("file_format", file_format, "str") + if asset_kind is not None: + _params["assetKind"] = _SERIALIZER.query("asset_kind", asset_kind, "str") + + return HttpRequest(method="POST", url=_url, params=_params, **kwargs) + + +def build_question_answering_authoring_get_export_status_request( # pylint: disable=name-too-long + project_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", 
{}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/export/jobs/{jobId}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_begin_import_assets_request( # pylint: disable=name-too-long + project_name: str, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/:import" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if file_format is not None: + _params["format"] = _SERIALIZER.query("file_format", file_format, "str") + if asset_kind is not None: + _params["assetKind"] = _SERIALIZER.query("asset_kind", asset_kind, "str") + + # Construct 
headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_import_from_files_request( # pylint: disable=name-too-long + project_name: str, *, asset_kind: Optional[Union[str, _models.AssetKind]] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/:import" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if asset_kind is not None: + _params["assetKind"] = _SERIALIZER.query("asset_kind", asset_kind, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_get_import_status_request( # pylint: disable=name-too-long + project_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/import/jobs/{jobId}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_deploy_project_request( # pylint: disable=name-too-long + project_name: str, deployment_name: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, **kwargs) + + +def build_question_answering_authoring_get_deploy_status_request( # pylint: disable=name-too-long + project_name: str, deployment_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/deployments/{deploymentName}/jobs/{jobId}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", 
deployment_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_list_deployments_request( # pylint: disable=name-too-long + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/deployments" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_list_synonyms_request( # pylint: disable=name-too-long + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + 
maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/synonyms" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_update_synonyms_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/synonyms" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_list_sources_request( # pylint: disable=name-too-long + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/sources" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_update_sources_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", 
_headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/sources" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_update_sources_from_files_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/sources" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_get_update_sources_status_request( # pylint: disable=name-too-long + project_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/sources/jobs/{jobId}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_list_qnas_request( # pylint: disable=name-too-long + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + maxpagesize: Optional[int] = None, + source: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/qnas" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if source is not None: + _params["source"] = _SERIALIZER.query("source", source, "str") + 
+ # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_update_qnas_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/qnas" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_get_update_qnas_status_request( # pylint: disable=name-too-long + project_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/qnas/jobs/{jobId}" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_question_answering_authoring_add_feedback_request( # pylint: disable=name-too-long + project_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2025-05-15-preview")) + # Construct URL + _url = "/authoring/query-knowledgebases/projects/{projectName}/feedback" + path_format_arguments = { + "projectName": _SERIALIZER.url("project_name", project_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class _QuestionAnsweringAuthoringClientOperationsMixin( # pylint: disable=too-many-public-methods + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], QuestionAnsweringAuthoringClientConfiguration] +): + + @distributed_trace + def list_projects( + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> ItemPaged["_models.QuestionAnsweringProject"]: + """Gets all projects for a user. + + :keyword top: The maximum number of resources to return from the collection. Default value is + None. 
+ :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of QuestionAnsweringProject + :rtype: + ~azure.core.paging.ItemPaged[~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.QuestionAnsweringProject]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_projects_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.QuestionAnsweringProject], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_project_details(self, project_name: str, **kwargs: Any) -> _models.QuestionAnsweringProject: + """Get the requested project metadata. + + :param project_name: Name of the project. Required. + :type project_name: str + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringProject] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_project_details_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringProject, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_project( + self, + project_name: str, + body: _models.QuestionAnsweringProject, + 
*, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_project( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_project( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_project( + self, project_name: str, body: Union[_models.QuestionAnsweringProject, JSON, IO[bytes]], **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Is one of the following types: QuestionAnsweringProject, + JSON, IO[bytes] Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject or + JSON or IO[bytes] + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.QuestionAnsweringProject] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_create_project_request( + project_name=project_name, + content_type=content_type, + 
api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringProject, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _delete_project_initial(self, project_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_delete_project_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete_project(self, project_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete the project. + + :param project_name: The name of the project to use. Required. 
+ :type project_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_project_initial( + project_name=project_name, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def _get_delete_status( + self, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringProjectDeletionJobState: + """Gets the status of a Project delete job. + + :param job_id: The job ID. Required. 
+ :type job_id: str + :return: QuestionAnsweringAuthoringProjectDeletionJobState. The + QuestionAnsweringAuthoringProjectDeletionJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeletionJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringProjectDeletionJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_delete_status_request( + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringProjectDeletionJobState, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _export_initial( + self, + project_name: str, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_export_request( + project_name=project_name, + file_format=file_format, + asset_kind=asset_kind, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: 
ignore + + @distributed_trace + def begin_export( + self, + project_name: str, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Export project metadata and assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv", + and "excel". Default value is None. + :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._export_initial( + project_name=project_name, + file_format=file_format, + asset_kind=asset_kind, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: 
PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def _get_export_status( + self, project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringExportJobState: + """Gets the status of an Export job, once job completes, returns the project + metadata, and assets. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringExportJobState. 
The QuestionAnsweringAuthoringExportJobState + is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringExportJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringExportJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_export_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringExportJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def 
_begin_import_assets_initial( + self, + project_name: str, + body: Optional[Union[_models.ImportJobOptions, JSON, IO[bytes]]] = None, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if body else None + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" if body else None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_question_answering_authoring_begin_import_assets_request( + project_name=project_name, + file_format=file_format, + asset_kind=asset_kind, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, 
StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_import_assets( + self, + project_name: str, + body: Optional[_models.ImportJobOptions] = None, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Project assets that need to be imported. Default value is None. + :type body: ~azure.ai.language.questionanswering.authoring.models.ImportJobOptions + :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv", + and "excel". Default value is None. + :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_import_assets( + self, + project_name: str, + body: Optional[JSON] = None, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Project assets that need to be imported. Default value is None. + :type body: JSON + :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv", + and "excel". Default value is None. + :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_import_assets( + self, + project_name: str, + body: Optional[IO[bytes]] = None, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Project assets that need to be imported. 
Default value is None. + :type body: IO[bytes] + :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv", + and "excel". Default value is None. + :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. + :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_import_assets( + self, + project_name: str, + body: Optional[Union[_models.ImportJobOptions, JSON, IO[bytes]]] = None, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Import project assets. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Project assets that need to be imported. Is one of the following types: + ImportJobOptions, JSON, IO[bytes] Default value is None. + :type body: ~azure.ai.language.questionanswering.authoring.models.ImportJobOptions or JSON or + IO[bytes] + :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv", + and "excel". Default value is None. + :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format + :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms". + Default value is None. 
+ :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + content_type = content_type if body else None + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._begin_import_assets_initial( + project_name=project_name, + body=body, + file_format=file_format, + asset_kind=asset_kind, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore + + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name", "asset_kind"]}, + api_versions_list=["2025-05-15-preview"], + ) + def _import_from_files_initial( + self, + project_name: str, + body: Union[_models.ImportFiles, JSON], + *, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _body = body.as_dict() if isinstance(body, _Model) else body + _file_fields: list[str] = ["files"] + _data_fields: list[str] = [] + _files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields) + + _request = build_question_answering_authoring_import_from_files_request( + project_name=project_name, + asset_kind=asset_kind, + api_version=self._config.api_version, + files=_files, + data=_data, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) 
+        error = _failsafe_deserialize(_models.ErrorResponse, response)
+        raise HttpResponseError(response=response, model=error)
+
+        response_headers = {}
+        response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))
+
+        deserialized = response.iter_bytes()
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @overload
+    def begin_import_from_files(
+        self,
+        project_name: str,
+        body: _models.ImportFiles,
+        *,
+        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
+        **kwargs: Any
+    ) -> LROPoller[None]:
+        """Import project assets from file.
+
+        :param project_name: The name of the project to use. Required.
+        :type project_name: str
+        :param body: Collection of files containing project assets that need to be imported. Required.
+        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportFiles
+        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
+         Default value is None.
+        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
+        :return: An instance of LROPoller that returns None
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def begin_import_from_files(
+        self,
+        project_name: str,
+        body: JSON,
+        *,
+        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
+        **kwargs: Any
+    ) -> LROPoller[None]:
+        """Import project assets from file.
+
+        :param project_name: The name of the project to use. Required.
+        :type project_name: str
+        :param body: Collection of files containing project assets that need to be imported. Required.
+        :type body: JSON
+        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
+         Default value is None.
+        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
+        :return: An instance of LROPoller that returns None
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    @api_version_validation(
+        method_added_on="2025-05-15-preview",
+        params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name", "asset_kind"]},
+        api_versions_list=["2025-05-15-preview"],
+    )
+    def begin_import_from_files(
+        self,
+        project_name: str,
+        body: Union[_models.ImportFiles, JSON],
+        *,
+        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
+        **kwargs: Any
+    ) -> LROPoller[None]:
+        """Import project assets from file.
+
+        :param project_name: The name of the project to use. Required.
+        :type project_name: str
+        :param body: Collection of files containing project assets that need to be imported. Is either
+         an ImportFiles type or a JSON type. Required.
+        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportFiles or JSON
+        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
+         Default value is None.
+ :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._import_from_files_initial( + project_name=project_name, + body=body, + asset_kind=asset_kind, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def _get_import_status( + self, project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringImportJobState: + """Gets the status of an 
Import job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringImportJobState. The QuestionAnsweringAuthoringImportJobState + is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringImportJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringImportJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_import_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_models.QuestionAnsweringAuthoringImportJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _deploy_project_initial(self, project_name: str, deployment_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_deploy_project_request( + project_name=project_name, + deployment_name=deployment_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized 
# type: ignore + + @distributed_trace + def begin_deploy_project(self, project_name: str, deployment_name: str, **kwargs: Any) -> LROPoller[None]: + """Deploy project to production. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param deployment_name: The name of the specific deployment of the project to use. Required. + :type deployment_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._deploy_project_initial( + project_name=project_name, + deployment_name=deployment_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) 
+ return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def _get_deploy_status( + self, project_name: str, deployment_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringProjectDeploymentJobState: + """Gets the status of a Deploy job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param deployment_name: Name of the deployment. Required. + :type deployment_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringProjectDeploymentJobState. The + QuestionAnsweringAuthoringProjectDeploymentJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeploymentJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringProjectDeploymentJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_deploy_status_request( + project_name=project_name, + deployment_name=deployment_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringProjectDeploymentJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_deployments( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> ItemPaged["_models.ProjectDeployment"]: + """List all deployments of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. 
+ :paramtype skip: int + :return: An iterator like instance of ProjectDeployment + :rtype: + ~azure.core.paging.ItemPaged[~azure.ai.language.questionanswering.authoring.models.ProjectDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.ProjectDeployment]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_deployments_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized 
= pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ProjectDeployment], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_synonyms( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> ItemPaged["_models.WordAlterations"]: + """Gets all the synonyms of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. 
+ :paramtype skip: int + :return: An iterator like instance of WordAlterations + :rtype: + ~azure.core.paging.ItemPaged[~azure.ai.language.questionanswering.authoring.models.WordAlterations] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.WordAlterations]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_synonyms_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.WordAlterations], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @overload + def update_synonyms( + self, project_name: str, body: _models.SynonymAssets, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_synonyms( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_synonyms( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_synonyms( # pylint: disable=inconsistent-return-statements + self, project_name: str, body: Union[_models.SynonymAssets, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Is one of the following types: SynonymAssets, JSON, + IO[bytes] Required. 
+ :type body: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets or JSON or + IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_synonyms_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_sources( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = 
None, **kwargs: Any + ) -> ItemPaged["_models.QnaSourceRecord"]: + """Gets all the sources of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of QnaSourceRecord + :rtype: + ~azure.core.paging.ItemPaged[~azure.ai.language.questionanswering.authoring.models.QnaSourceRecord] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.QnaSourceRecord]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_sources_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.QnaSourceRecord], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _update_sources_initial( + self, project_name: str, body: Union[list[_models.UpdateSourceRecord], IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if 
isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_sources_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update_sources( + self, + project_name: str, + body: list[_models.UpdateSourceRecord], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update sources parameters of a project. Required. 
+ :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_sources( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update sources parameters of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_sources( + self, project_name: str, body: Union[list[_models.UpdateSourceRecord], IO[bytes]], **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update sources parameters of a project. Is either a [UpdateSourceRecord] type or a + IO[bytes] type. Required. 
+ :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] or + IO[bytes] + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_sources_initial( + project_name=project_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @api_version_validation( + 
method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name"]}, + api_versions_list=["2025-05-15-preview"], + ) + def _update_sources_from_files_initial( + self, project_name: str, body: Union[_models.UpdateSourceFiles, JSON], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _body = body.as_dict() if isinstance(body, _Model) else body + _file_fields: list[str] = ["files"] + _data_fields: list[str] = ["fileOperations"] + _files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields) + + _request = build_question_answering_authoring_update_sources_from_files_request( + project_name=project_name, + api_version=self._config.api_version, + files=_files, + data=_data, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update_sources_from_files( + self, project_name: str, body: _models.UpdateSourceFiles, **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.UpdateSourceFiles + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_sources_from_files(self, project_name: str, body: JSON, **kwargs: Any) -> LROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Required. + :type body: JSON + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name"]}, + api_versions_list=["2025-05-15-preview"], + ) + def begin_update_sources_from_files( + self, project_name: str, body: Union[_models.UpdateSourceFiles, JSON], **kwargs: Any + ) -> LROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. 
+ :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Is either + a UpdateSourceFiles type or a JSON type. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.UpdateSourceFiles or JSON + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_sources_from_files_initial( + project_name=project_name, body=body, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def 
_get_update_sources_status( + self, project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringUpdateSourcesJobState: + """Gets the status of update sources job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringUpdateSourcesJobState. The + QuestionAnsweringAuthoringUpdateSourcesJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateSourcesJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringUpdateSourcesJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_update_sources_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringUpdateSourcesJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_qnas( + self, + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + source: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.RetrieveQnaRecord"]: + """Gets all the QnAs of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :keyword source: Source of the QnA. Default value is None. 
+ :paramtype source: str + :return: An iterator like instance of RetrieveQnaRecord + :rtype: + ~azure.core.paging.ItemPaged[~azure.ai.language.questionanswering.authoring.models.RetrieveQnaRecord] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.RetrieveQnaRecord]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_qnas_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + source=source, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + 
deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.RetrieveQnaRecord], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + def _update_qnas_initial( + self, project_name: str, body: Union[list[_models.UpdateQnaRecord], IO[bytes]], **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_qnas_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { 
+ "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update_qnas( + self, + project_name: str, + body: list[_models.UpdateQnaRecord], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Required. + :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_qnas( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_qnas( + self, project_name: str, body: Union[list[_models.UpdateQnaRecord], IO[bytes]], **kwargs: Any + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Is either a [UpdateQnaRecord] type or a + IO[bytes] type. Required. 
+ :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] or + IO[bytes] + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_qnas_initial( + project_name=project_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def _get_update_qnas_status( + self, 
project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringUpdateQnasJobState: + """Gets the status of update QnAs job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringUpdateQnasJobState. The + QuestionAnsweringAuthoringUpdateQnasJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateQnasJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringUpdateQnasJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_update_qnas_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, 
response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringUpdateQnasJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def add_feedback( + self, + project_name: str, + body: _models.ActiveLearningFeedback, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.ActiveLearningFeedback + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def add_feedback( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def add_feedback( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. 
+ :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def add_feedback( # pylint: disable=inconsistent-return-statements + self, project_name: str, body: Union[_models.ActiveLearningFeedback, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Is one of the following types: + ActiveLearningFeedback, JSON, IO[bytes] Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.ActiveLearningFeedback or + JSON or IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_add_feedback_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", 
skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_operations/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_operations/_patch.py new file mode 100644 index 000000000000..5c40f444b2ef --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_operations/_patch.py @@ -0,0 +1,324 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long,useless-suppression,duplicate-code,arguments-renamed +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from collections.abc import MutableMapping +from typing import Any, Union, overload, IO +from azure.core.polling import LROPoller +from azure.core.tracing.decorator import distributed_trace + +from .. 
import models as _models + +from ._operations import ( + _QuestionAnsweringAuthoringClientOperationsMixin as _QuestionAnsweringAuthoringClientOperationsMixinGenerated, +) + +JSON = MutableMapping[str, Any] + + +class _QuestionAnsweringAuthoringClientOperationsMixin(_QuestionAnsweringAuthoringClientOperationsMixinGenerated): + """Mixin class for patching methods with backward compatible parameter names.""" + + # create_project overloads with 'options' parameter + @overload # type: ignore[override] + def create_project( + self, + project_name: str, + options: _models.QuestionAnsweringProject, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Required. + :type options: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_project( + self, project_name: str, options: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Required. + :type options: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_project( + self, project_name: str, options: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Required. + :type options: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_project( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, options: Union[_models.QuestionAnsweringProject, JSON, IO[bytes]], **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Is one of the following types: QuestionAnsweringProject, + JSON, IO[bytes] Required. + :type options: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject or + JSON or IO[bytes] + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return super().create_project(project_name=project_name, body=options, **kwargs) + + # update_synonyms overloads with 'synonyms' parameter + @overload # type: ignore[override] + def update_synonyms( + self, + project_name: str, + synonyms: _models.SynonymAssets, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_synonyms( + self, project_name: str, synonyms: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_synonyms( + self, project_name: str, synonyms: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. 
+ + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_synonyms( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, synonyms: Union[_models.SynonymAssets, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Is one of the following types: SynonymAssets, JSON, + IO[bytes] Required. + :type synonyms: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets or JSON or + IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return super().update_synonyms(project_name=project_name, body=synonyms, **kwargs) + + # begin_update_qnas overloads with 'qnas' parameter + @overload # type: ignore[override] + def begin_update_qnas( + self, + project_name: str, + qnas: list[_models.UpdateQnaRecord], + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> LROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+    :param qnas: Update QnAs parameters of a project. Is either a list[UpdateQnaRecord] type or an
+     IO[bytes] type. Required.
+    :param sources: Update sources parameters of a project. Is either a list[UpdateSourceRecord] type or an
+     IO[bytes] type. Required.
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/model_base.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/model_base.py new file mode 100644 index 000000000000..45d922b21ccc --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/model_base.py @@ -0,0 +1,1237 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines,duplicate-code +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
+    # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> 
datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. 
+ :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: 
typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a 
view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. 
+ :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds 
in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", 
False): + # unwrapped array could either use prop items meta/prop meta + if prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + 
return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? 
+ try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return 
functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + 
format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> 
typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. 
+ :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + 
xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return 
ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/serialization.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/serialization.py new file mode 100644 index 000000000000..cca8513e0e67 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/serialization.py @@ -0,0 +1,2030 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=line-too-long,useless-suppression,too-many-lines,duplicate-code,missing-module-docstring,missing-class-docstring,missing-function-docstring,consider-using-f-string,invalid-name,too-many-locals,too-many-branches + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. 
+ :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. 
+ + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. 
Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decodes a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ + raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builtin data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + 
el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ + raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialized unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+        :rtype: object
+        """
+        if data is None:
+            return data
+
+        try:
+            if not data_type:
+                return data
+            if data_type in self.basic_types.values():
+                return self.deserialize_basic(data, data_type)
+            if data_type in self.deserialize_type:
+                # Input is already the expected Python type for this data_type
+                # (e.g. a datetime/timedelta passed through): return it untouched.
+                if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
+                    return data
+
+                is_a_text_parsing_type = lambda x: x not in [  # pylint: disable=unnecessary-lambda-assignment
+                    "object",
+                    "[]",
+                    r"{}",
+                ]
+                if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
+                    return None
+                data_val = self.deserialize_type[data_type](data)
+                return data_val
+
+            # Collapse a container annotation to its bracket pair: "[Model]" -> "[]", "{str}" -> "{}".
+            iter_type = data_type[0] + data_type[-1]
+            if iter_type in self.deserialize_type:
+                return self.deserialize_type[iter_type](data, data_type[1:-1])
+
+            obj_type = self.dependencies[data_type]
+            if issubclass(obj_type, Enum):
+                if isinstance(data, ET.Element):
+                    data = data.text
+                return self.deserialize_enum(data, obj_type)
+
+        except (ValueError, TypeError, AttributeError) as err:
+            msg = "Unable to deserialize response data."
+            msg += " Data: {}, {}".format(data, data_type)
+            raise DeserializationError(msg) from err
+        # Only reached when data_type named a model class resolved from self.dependencies;
+        # NOTE(review): an unknown type name raises KeyError above, which is not wrapped
+        # into DeserializationError — presumably intentional, confirm before changing.
+        return self._deserialize(obj_type, data)
+
+    def deserialize_iter(self, attr, iter_type):
+        """Deserialize an iterable.
+
+        :param list attr: Iterable to be deserialized.
+        :param str iter_type: The type of object in the iterable.
+        :return: Deserialized iterable.
+        :rtype: list
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):  # If I receive an element here, get the children
+            attr = list(attr)
+        if not isinstance(attr, (list, set)):
+            raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+        return [self.deserialize_data(a, iter_type) for a in attr]
+
+    def deserialize_dict(self, attr, dict_type):
+        """Deserialize a dictionary.
+
+        :param dict/list attr: Dictionary to be deserialized. Also accepts
+            a list of key, value pairs.
+        :param str dict_type: The object type of the items in the dictionary.
+        :return: Deserialized dictionary.
+        :rtype: dict
+        """
+        if isinstance(attr, list):
+            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+        if isinstance(attr, ET.Element):
+            # Transform value into {"Key": "value"}
+            attr = {el.tag: el.text for el in attr}
+        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+    def deserialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
+        """Deserialize a generic object.
+        This will be handled as a dictionary.
+
+        :param dict attr: Dictionary to be deserialized.
+        :return: Deserialized object.
+        :rtype: dict
+        :raises TypeError: if non-builtin datatype encountered.
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            # Do no recurse on XML, just return the tree as-is
+            return attr
+        if isinstance(attr, str):
+            return self.deserialize_basic(attr, "str")
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.deserialize_basic(attr, self.basic_types[obj_type])
+        if obj_type is _long_type:
+            return self.deserialize_long(attr)
+
+        if obj_type == dict:
+            deserialized = {}
+            for key, value in attr.items():
+                try:
+                    deserialized[key] = self.deserialize_object(value, **kwargs)
+                except ValueError:
+                    # Best-effort: an undeserializable value becomes None, key is kept.
+                    deserialized[key] = None
+            return deserialized
+
+        if obj_type == list:
+            deserialized = []
+            for obj in attr:
+                try:
+                    deserialized.append(self.deserialize_object(obj, **kwargs))
+                except ValueError:
+                    # Best-effort: undeserializable list items are silently dropped.
+                    pass
+            return deserialized
+
+        error = "Cannot deserialize generic object with type: "
+        raise TypeError(error + str(obj_type))
+
+    def deserialize_basic(self, attr, data_type):  # pylint: disable=too-many-return-statements
+        """Deserialize basic builtin data type from string.
+        Will attempt to convert to str, int, float and bool.
+        This function will also accept '1', '0', 'true' and 'false' as
+        valid bool values.
+
+        :param str attr: response string to be deserialized.
+        :param str data_type: deserialization data type.
+        :return: Deserialized basic type.
+        :rtype: str, int, float or bool
+        :raises TypeError: if string format is not valid.
+        """
+        # If we're here, data is supposed to be a basic type.
+        # If it's still an XML node, take the text
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        # NOTE(review): any falsy input lands here, so non-string falsy values
+        # (0, 0.0, False) deserialize to None rather than their typed value —
+        # kept as-is for compatibility with the generator's upstream behavior.
+        if not attr:
+            if data_type == "str":
+                # None or '', node is empty string.
+                return ""
+            # None or '', node with a strong type is None.
+            # Don't try to model "empty bool" or "empty int"
+            return None
+
+        if data_type == "bool":
+            if attr in [True, False, 1, 0]:
+                return bool(attr)
+            if isinstance(attr, str):
+                if attr.lower() in ["true", "1"]:
+                    return True
+                if attr.lower() in ["false", "0"]:
+                    return False
+            raise TypeError("Invalid boolean value: {}".format(attr))
+
+        if data_type == "str":
+            return self.deserialize_unicode(attr)
+        # data_type is assumed to be one of the trusted names in self.basic_types
+        # ("int"/"float"), not untrusted input — hence the nosec annotation.
+        return eval(data_type)(attr)  # nosec # pylint: disable=eval-used
+
+    @staticmethod
+    def deserialize_unicode(data):
+        """Preserve unicode objects in Python 2, otherwise return data
+        as a string.
+
+        :param str data: response string to be deserialized.
+        :return: Deserialized string.
+        :rtype: str or unicode
+        """
+        # We might be here because we have an enum modeled as string,
+        # and we try to deserialize a partial dict with enum inside
+        if isinstance(data, Enum):
+            return data
+
+        # Consider this is real string
+        try:
+            if isinstance(data, unicode):  # type: ignore
+                return data
+        except NameError:
+            # Python 3: `unicode` does not exist; every str is already unicode.
+            return str(data)
+        return str(data)
+
+    @staticmethod
+    def deserialize_enum(data, enum_obj):
+        """Deserialize string into enum object.
+
+        If the string is not a valid enum value it will be returned as-is
+        and a warning will be logged.
+
+        :param str data: Response string to be deserialized. If this value is
+            None or invalid it will be returned as-is.
+        :param Enum enum_obj: Enum object to deserialize to.
+        :return: Deserialized enum object.
+ :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. 
+ :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." 
+ raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/utils.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/utils.py new file mode 100644 index 000000000000..222120789c32 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_utils/utils.py @@ -0,0 +1,68 @@ +# pylint: disable=missing-module-docstring,invalid-name,too-few-public-methods,missing-function-docstring +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +import json +from typing import Any, Generic, IO, Mapping, Optional, TYPE_CHECKING, TypeVar, Union + +from .._utils.model_base import Model, SdkJSONEncoder + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. 
It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" + + +# file-like tuple could be `(filename, IO (or bytes))` or `(filename, IO (or bytes), content_type)` +FileContent = Union[str, bytes, IO[str], IO[bytes]] + +FileType = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + tuple[Optional[str], FileContent, Optional[str]], +] + + +def serialize_multipart_data_entry(data_entry: Any) -> Any: + if isinstance(data_entry, (list, tuple, dict, Model)): + return json.dumps(data_entry, cls=SdkJSONEncoder, exclude_readonly=True) + return data_entry + + +def prepare_multipart_form_data( + body: Mapping[str, Any], multipart_fields: list[str], data_fields: list[str] +) -> tuple[list[FileType], dict[str, Any]]: + files: list[FileType] = [] + data: dict[str, Any] = {} + for multipart_field in multipart_fields: + multipart_entry = body.get(multipart_field) + if isinstance(multipart_entry, list): + files.extend([(multipart_field, e) for e in multipart_entry]) + elif multipart_entry: + files.append((multipart_field, multipart_entry)) + + for data_field in data_fields: + data_entry = body.get(data_field) + if data_entry: + data[data_field] = serialize_multipart_data_entry(data_entry) + + return files, data diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_validation.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_validation.py new file mode 100644 index 000000000000..f5af3a4eb8a2 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_validation.py @@ -0,0 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import functools


+def api_version_validation(**kwargs):
+    """Decorator factory validating, at call time, that the client's configured API
+    version supports the decorated operation and the keyword arguments passed to it.
+
+    :keyword dict params_added_on: Mapping of API version -> iterable of parameter
+        names first introduced in that version.
+    :keyword str method_added_on: API version in which the decorated method was introduced.
+    :keyword list api_versions_list: Known service API versions, ordered oldest to newest.
+    :return: A decorator that wraps an operation with the validation described above.
+    :rtype: callable
+    """
+    params_added_on = kwargs.pop("params_added_on", {})
+    method_added_on = kwargs.pop("method_added_on", "")
+    api_versions_list = kwargs.pop("api_versions_list", [])
+
+    def _index_with_default(value: str, default: int = -1) -> int:
+        """Get the index of value in api_versions_list, or return default if not found.
+
+        :param value: The value to search for in the api_versions_list.
+        :type value: str
+        :param default: The default value to return if the value is not found.
+        :type default: int
+        :return: The index of the value in the list, or the default value if not found.
+        :rtype: int
+        """
+        try:
+            return api_versions_list.index(value)
+        except ValueError:
+            return default
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            try:
+                # args[0] is expected to be the client; read the negotiated API version
+                # from its pipeline configuration. If the attribute chain is missing,
+                # skip validation entirely and call through.
+                client = args[0]
+                client_api_version = client._config.api_version  # pylint: disable=protected-access
+            except AttributeError:
+                return func(*args, **kwargs)
+
+            if _index_with_default(method_added_on) > _index_with_default(client_api_version):
+                raise ValueError(
+                    f"'{func.__name__}' is not available in API version "
+                    f"{client_api_version}. Pass service API version {method_added_on} or newer to your client."
+                )
+
+            # Collect every keyword actually passed that was introduced in an API
+            # version newer than the one the client is configured with.
+            unsupported = {
+                parameter: api_version
+                for api_version, parameters in params_added_on.items()
+                for parameter in parameters
+                if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version)
+            }
+            if unsupported:
+                raise ValueError(
+                    "".join(
+                        [
+                            f"'{param}' is not available in API version {client_api_version}. "
+                            f"Use service API version {version} or newer.\n"
+                            for param, version in unsupported.items()
+                        ]
+                    )
+                )
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_version.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_version.py
new file mode 100644
index 000000000000..be71c81bd282
--- /dev/null
+++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/__init__.py new file mode 100644 index 000000000000..bfc94b514d6b --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import QuestionAnsweringAuthoringClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "QuestionAnsweringAuthoringClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_client.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_client.py new file mode 100644 index 000000000000..a1050ede4efc --- /dev/null +++ 
b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_client.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING, Union +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import QuestionAnsweringAuthoringClientConfiguration +from ._operations import _QuestionAnsweringAuthoringClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class QuestionAnsweringAuthoringClient(_QuestionAnsweringAuthoringClientOperationsMixin): + """QuestionAnsweringAuthoringClient. + + :param endpoint: Supported Cognitive Services endpoint (e.g., + https://.api.cognitiveservices.azure.com). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a key + credential type or a token credential type. Required. + :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2025-05-15-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + """ + + def __init__( + self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any + ) -> None: + _endpoint = "{Endpoint}/language" + self._config = QuestionAnsweringAuthoringClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. 
Does not do error handling on your response. + :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_configuration.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_configuration.py new file mode 100644 index 000000000000..8109298b1735 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_configuration.py @@ -0,0 +1,76 @@ +# pylint: disable=too-many-lines,duplicate-code +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING, Union + +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class QuestionAnsweringAuthoringClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long + """Configuration for QuestionAnsweringAuthoringClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Supported Cognitive Services endpoint (e.g., + https://.api.cognitiveservices.azure.com). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a key + credential type or a token credential type. Required. + :type credential: ~azure.core.credentials.AzureKeyCredential or + ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2025-05-15-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, endpoint: str, credential: Union[AzureKeyCredential, "AsyncTokenCredential"], **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2025-05-15-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cognitiveservices.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "ai-language-questionanswering-authoring/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _infer_policy(self, **kwargs): + if isinstance(self.credential, AzureKeyCredential): + return policies.AzureKeyCredentialPolicy(self.credential, "Ocp-Apim-Subscription-Key", **kwargs) + if hasattr(self.credential, "get_token"): + return policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + raise TypeError(f"Unsupported credential: {self.credential}") + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or 
policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = self._infer_policy(**kwargs) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py new file mode 100644 index 000000000000..af23bedf5354 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/__init__.py @@ -0,0 +1,24 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import _QuestionAnsweringAuthoringClientOperationsMixin # type: ignore # pylint: disable=unused-import + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py new file mode 100644 index 000000000000..ac857b0bc2b8 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_operations.py @@ -0,0 +1,2697 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... 
import models as _models +from ..._operations._operations import ( + build_question_answering_authoring_add_feedback_request, + build_question_answering_authoring_begin_import_assets_request, + build_question_answering_authoring_create_project_request, + build_question_answering_authoring_delete_project_request, + build_question_answering_authoring_deploy_project_request, + build_question_answering_authoring_export_request, + build_question_answering_authoring_get_delete_status_request, + build_question_answering_authoring_get_deploy_status_request, + build_question_answering_authoring_get_export_status_request, + build_question_answering_authoring_get_import_status_request, + build_question_answering_authoring_get_project_details_request, + build_question_answering_authoring_get_update_qnas_status_request, + build_question_answering_authoring_get_update_sources_status_request, + build_question_answering_authoring_import_from_files_request, + build_question_answering_authoring_list_deployments_request, + build_question_answering_authoring_list_projects_request, + build_question_answering_authoring_list_qnas_request, + build_question_answering_authoring_list_sources_request, + build_question_answering_authoring_list_synonyms_request, + build_question_answering_authoring_update_qnas_request, + build_question_answering_authoring_update_sources_from_files_request, + build_question_answering_authoring_update_sources_request, + build_question_answering_authoring_update_synonyms_request, +) +from ..._utils.model_base import Model as _Model, SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.utils import ClientMixinABC, prepare_multipart_form_data +from ..._validation import api_version_validation +from .._configuration import QuestionAnsweringAuthoringClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] + + +class 
_QuestionAnsweringAuthoringClientOperationsMixin( # pylint: disable=too-many-public-methods + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], QuestionAnsweringAuthoringClientConfiguration] +): + + @distributed_trace + def list_projects( + self, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.QuestionAnsweringProject"]: + """Gets all projects for a user. + + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of QuestionAnsweringProject + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.QuestionAnsweringProject]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_projects_request( + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + 
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.QuestionAnsweringProject], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_project_details(self, project_name: str, **kwargs: Any) -> _models.QuestionAnsweringProject: + """Get the requested project metadata. + + :param project_name: Name of the project. Required. + :type project_name: str + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringProject] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_project_details_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringProject, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_project( + self, + project_name: str, + body: 
_models.QuestionAnsweringProject, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_project( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_project( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Required. 
+ :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_project( + self, project_name: str, body: Union[_models.QuestionAnsweringProject, JSON, IO[bytes]], **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param body: The resource instance. Is one of the following types: QuestionAnsweringProject, + JSON, IO[bytes] Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject or + JSON or IO[bytes] + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.QuestionAnsweringProject] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_create_project_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + 
raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringProject, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _delete_project_initial(self, project_name: str, **kwargs: Any) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_delete_project_request( + project_name=project_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if 
cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def begin_delete_project(self, project_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Delete the project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_project_initial( + project_name=project_name, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace_async + async def _get_delete_status( + self, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringProjectDeletionJobState: + """Gets the status of a Project delete job. + + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringProjectDeletionJobState. The + QuestionAnsweringAuthoringProjectDeletionJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeletionJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringProjectDeletionJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_delete_status_request( + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringProjectDeletionJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _export_initial( + self, + project_name: str, + *, + file_format: Optional[Union[str, _models.Format]] = None, + asset_kind: Optional[Union[str, _models.AssetKind]] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_export_request( + project_name=project_name, + file_format=file_format, + asset_kind=asset_kind, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + 
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Operation-Location carries the URL that the LRO poller will use.
        response_headers = {}
        response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers) # type: ignore

        return deserialized # type: ignore

    @distributed_trace_async
    async def begin_export(
        self,
        project_name: str,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Export project metadata and assets.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
         and "excel". Default value is None.
        :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # cls=lambda x, y, z: x passes the raw pipeline response through so the
            # poller can read the Operation-Location header from it.
            raw_result = await self._export_initial(
                project_name=project_name,
                file_format=file_format,
                asset_kind=asset_kind,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read() # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {}) # type: ignore

        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore

    @distributed_trace_async
    async def _get_export_status(
        self, project_name: str, job_id: str, **kwargs: Any
    ) -> _models.QuestionAnsweringAuthoringExportJobState:
        """Gets the status of an Export job, once job completes, returns the project
        metadata, and assets.

        :param project_name: Name of the project. Required.
        :type project_name: str
        :param job_id: The job ID. Required.
        :type job_id: str
        :return: QuestionAnsweringAuthoringExportJobState. The QuestionAnsweringAuthoringExportJobState
         is compatible with MutableMapping
        :rtype:
         ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringExportJobState
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.QuestionAnsweringAuthoringExportJobState] = kwargs.pop("cls", None)

        _request = build_question_answering_authoring_get_export_status_request(
            project_name=project_name,
            job_id=job_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_models.QuestionAnsweringAuthoringExportJobState, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {}) # type: ignore

        return deserialized # type: ignore

    async def _begin_import_assets_initial(
        self,
        project_name: str,
        body: Optional[Union[_models.ImportJobOptions, JSON, IO[bytes]]] = None,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Internal helper: sends the initial import request and returns the raw
        # streamed 202 response; begin_import_assets wraps it in an LRO poller.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        content_type = content_type if body else None
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        # Parses as: content_type or ("application/json" if body else None) —
        # combined with the guard above, Content-Type is only set when a body is sent.
        content_type = content_type or "application/json" if body else None
        _content = None
        if isinstance(body, (IOBase, bytes)):
            _content = body
        else:
            if body is not None:
                _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore
            else:
                _content = None

        _request = build_question_answering_authoring_begin_import_assets_request(
            project_name=project_name,
            file_format=file_format,
            asset_kind=asset_kind,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers) # type: ignore

        return deserialized # type: ignore

    @overload
    async def begin_import_assets(
        self,
        project_name: str,
        body: Optional[_models.ImportJobOptions] = None,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Project assets that need to be imported. Default value is None.
        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportJobOptions
        :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
         and "excel". Default value is None.
        :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_import_assets(
        self,
        project_name: str,
        body: Optional[JSON] = None,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Project assets that need to be imported. Default value is None.
        :type body: JSON
        :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
         and "excel". Default value is None.
        :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_import_assets(
        self,
        project_name: str,
        body: Optional[IO[bytes]] = None,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Project assets that need to be imported. Default value is None.
        :type body: IO[bytes]
        :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
         and "excel". Default value is None.
        :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def begin_import_assets(
        self,
        project_name: str,
        body: Optional[Union[_models.ImportJobOptions, JSON, IO[bytes]]] = None,
        *,
        file_format: Optional[Union[str, _models.Format]] = None,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Project assets that need to be imported. Is one of the following types:
         ImportJobOptions, JSON, IO[bytes] Default value is None.
        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportJobOptions or JSON or
         IO[bytes]
        :keyword file_format: Knowledge base Import or Export format. Known values are: "json", "tsv",
         and "excel". Default value is None.
        :paramtype file_format: str or ~azure.ai.language.questionanswering.authoring.models.Format
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Only forward a Content-Type when a request body is actually present.
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        content_type = content_type if body else None
        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            raw_result = await self._begin_import_assets_initial(
                project_name=project_name,
                body=body,
                file_format=file_format,
                asset_kind=asset_kind,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read() # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {}) # type: ignore

        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore

    @api_version_validation(
        method_added_on="2025-05-15-preview",
        params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name", "asset_kind"]},
        api_versions_list=["2025-05-15-preview"],
    )
    async def _import_from_files_initial(
        self,
        project_name: str,
        body: Union[_models.ImportFiles, JSON],
        *,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Internal helper: uploads project asset files as multipart/form-data
        # ("files" field) and returns the raw streamed 202 response for the LRO wrapper.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _body = body.as_dict() if isinstance(body, _Model) else body
        _file_fields: list[str] = ["files"]
        _data_fields: list[str] = []
        _files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields)

        _request = build_question_answering_authoring_import_from_files_request(
            project_name=project_name,
            asset_kind=asset_kind,
            api_version=self._config.api_version,
            files=_files,
            data=_data,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Operation-Location carries the URL that the LRO poller will use.
        response_headers = {}
        response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers) # type: ignore

        return deserialized # type: ignore

    @overload
    async def begin_import_from_files(
        self,
        project_name: str,
        body: _models.ImportFiles,
        *,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets from file.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Collection of files containing project assets that need to be imported. Required.
        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportFiles
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def begin_import_from_files(
        self,
        project_name: str,
        body: JSON,
        *,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets from file.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Collection of files containing project assets that need to be imported. Required.
        :type body: JSON
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    @api_version_validation(
        method_added_on="2025-05-15-preview",
        params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name", "asset_kind"]},
        api_versions_list=["2025-05-15-preview"],
    )
    async def begin_import_from_files(
        self,
        project_name: str,
        body: Union[_models.ImportFiles, JSON],
        *,
        asset_kind: Optional[Union[str, _models.AssetKind]] = None,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Import project assets from file.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param body: Collection of files containing project assets that need to be imported. Is either
         a ImportFiles type or a JSON type. Required.
        :type body: ~azure.ai.language.questionanswering.authoring.models.ImportFiles or JSON
        :keyword asset_kind: Kind of the asset of the project. Known values are: "qnas" and "synonyms".
         Default value is None.
        :paramtype asset_kind: str or ~azure.ai.language.questionanswering.authoring.models.AssetKind
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # cls=lambda x, y, z: x passes the raw pipeline response through so the
            # poller can read the Operation-Location header from it.
            raw_result = await self._import_from_files_initial(
                project_name=project_name,
                body=body,
                asset_kind=asset_kind,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read() # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {}) # type: ignore

        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore

    @distributed_trace_async
    async def _get_import_status(
        self, project_name: str, job_id: str, **kwargs: Any
    ) -> _models.QuestionAnsweringAuthoringImportJobState:
        """Gets the status of an Import job.

        :param project_name: Name of the project. Required.
        :type project_name: str
        :param job_id: The job ID. Required.
        :type job_id: str
        :return: QuestionAnsweringAuthoringImportJobState. The QuestionAnsweringAuthoringImportJobState
         is compatible with MutableMapping
        :rtype:
         ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringImportJobState
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.QuestionAnsweringAuthoringImportJobState] = kwargs.pop("cls", None)

        _request = build_question_answering_authoring_get_import_status_request(
            project_name=project_name,
            job_id=job_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_models.QuestionAnsweringAuthoringImportJobState, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {}) # type: ignore

        return deserialized # type: ignore

    async def _deploy_project_initial(
        self, project_name: str, deployment_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        # Internal helper: sends the initial deploy request and returns the raw
        # streamed 202 response; begin_deploy_project wraps it in an LRO poller.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_question_answering_authoring_deploy_project_request(
            project_name=project_name,
            deployment_name=deployment_name,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers) # type: ignore

        return deserialized # type: ignore

    @distributed_trace_async
    async def begin_deploy_project(
        self, project_name: str, deployment_name: str, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deploy project to production.

        :param project_name: The name of the project to use. Required.
        :type project_name: str
        :param deployment_name: The name of the specific deployment of the project to use. Required.
        :type deployment_name: str
        :return: An instance of AsyncLROPoller that returns None
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # cls=lambda x, y, z: x passes the raw pipeline response through so the
            # poller can read the Operation-Location header from it.
            raw_result = await self._deploy_project_initial(
                project_name=project_name,
                deployment_name=deployment_name,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read() # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {}) # type: ignore

        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[None].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore

    @distributed_trace_async
    async def _get_deploy_status(
        self, project_name: str, deployment_name: str, job_id: str, **kwargs: Any
    ) -> _models.QuestionAnsweringAuthoringProjectDeploymentJobState:
        """Gets the status of a Deploy job.

        :param project_name: Name of the project. Required.
        :type project_name: str
        :param deployment_name: Name of the deployment. Required.
        :type deployment_name: str
        :param job_id: The job ID. Required.
        :type job_id: str
        :return: QuestionAnsweringAuthoringProjectDeploymentJobState. The
         QuestionAnsweringAuthoringProjectDeploymentJobState is compatible with MutableMapping
        :rtype:
         ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringProjectDeploymentJobState
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_models.QuestionAnsweringAuthoringProjectDeploymentJobState] = kwargs.pop("cls", None)

        _request = build_question_answering_authoring_get_deploy_status_request(
            project_name=project_name,
            deployment_name=deployment_name,
            job_id=job_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_models.QuestionAnsweringAuthoringProjectDeploymentJobState, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {}) # type: ignore

        return deserialized # type: ignore

    @distributed_trace
    def list_deployments(
        self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any
    ) -> AsyncItemPaged["_models.ProjectDeployment"]:
        """List all deployments of a project.

        :param project_name: Name of the project. Required.
        :type project_name: str
        :keyword top: The maximum number of resources to return from the collection. Default value is
         None.
        :paramtype top: int
        :keyword skip: An offset into the collection of the first resource to be returned. Default
         value is None.
        :paramtype skip: int
        :return: An iterator like instance of ProjectDeployment
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.ai.language.questionanswering.authoring.models.ProjectDeployment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        maxpagesize = kwargs.pop("maxpagesize", None)
        cls: ClsType[list[_models.ProjectDeployment]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:

                _request = build_question_answering_authoring_list_deployments_request(
                    project_name=project_name,
                    top=top,
                    skip=skip,
                    maxpagesize=maxpagesize,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "Endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ProjectDeployment], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_synonyms( + self, project_name: str, *, top: Optional[int] = None, skip: Optional[int] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.WordAlterations"]: + """Gets all the synonyms of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. 
Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of WordAlterations + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.ai.language.questionanswering.authoring.models.WordAlterations] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.WordAlterations]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_synonyms_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + 
} + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.WordAlterations], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @overload + async def update_synonyms( + self, project_name: str, body: _models.SynonymAssets, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_synonyms( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. 
+ + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_synonyms( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_synonyms( + self, project_name: str, body: Union[_models.SynonymAssets, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: All the synonyms of a project. Is one of the following types: SynonymAssets, JSON, + IO[bytes] Required. 
+ :type body: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets or JSON or + IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_synonyms_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def list_sources( + self, project_name: str, *, top: Optional[int] = None, 
skip: Optional[int] = None, **kwargs: Any + ) -> AsyncItemPaged["_models.QnaSourceRecord"]: + """Gets all the sources of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :return: An iterator like instance of QnaSourceRecord + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.ai.language.questionanswering.authoring.models.QnaSourceRecord] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.QnaSourceRecord]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_sources_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = 
self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.QnaSourceRecord], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + async def _update_sources_initial( + self, project_name: str, body: Union[list[_models.UpdateSourceRecord], IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = 
kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_sources_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update_sources( + self, + project_name: str, + body: list[_models.UpdateSourceRecord], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. 
+ :type project_name: str + :param body: Update sources parameters of a project. Required. + :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_sources( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update sources parameters of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_sources( + self, project_name: str, body: Union[list[_models.UpdateSourceRecord], IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update sources parameters of a project. Is either a [UpdateSourceRecord] type or a + IO[bytes] type. Required. 
+ :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] or + IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_sources_initial( + project_name=project_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: 
ignore + + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name"]}, + api_versions_list=["2025-05-15-preview"], + ) + async def _update_sources_from_files_initial( + self, project_name: str, body: Union[_models.UpdateSourceFiles, JSON], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _body = body.as_dict() if isinstance(body, _Model) else body + _file_fields: list[str] = ["files"] + _data_fields: list[str] = ["fileOperations"] + _files, _data = prepare_multipart_form_data(_body, _file_fields, _data_fields) + + _request = build_question_answering_authoring_update_sources_from_files_request( + project_name=project_name, + api_version=self._config.api_version, + files=_files, + data=_data, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise 
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update_sources_from_files( + self, project_name: str, body: _models.UpdateSourceFiles, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.UpdateSourceFiles + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_sources_from_files( + self, project_name: str, body: JSON, **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Required. 
+ :type body: JSON + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-05-15-preview", + params_added_on={"2025-05-15-preview": ["api_version", "content_type", "project_name"]}, + api_versions_list=["2025-05-15-preview"], + ) + async def begin_update_sources_from_files( + self, project_name: str, body: Union[_models.UpdateSourceFiles, JSON], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project from files. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Collection of files containing project assets the needs to be imported. Is either + a UpdateSourceFiles type or a JSON type. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.UpdateSourceFiles or JSON + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_sources_from_files_initial( + project_name=project_name, body=body, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace_async + async def _get_update_sources_status( + self, project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringUpdateSourcesJobState: + """Gets the status of update sources job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringUpdateSourcesJobState. 
The + QuestionAnsweringAuthoringUpdateSourcesJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateSourcesJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringUpdateSourcesJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_update_sources_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringUpdateSourcesJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + 
return deserialized # type: ignore + + @distributed_trace + def list_qnas( + self, + project_name: str, + *, + top: Optional[int] = None, + skip: Optional[int] = None, + source: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.RetrieveQnaRecord"]: + """Gets all the QnAs of a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :keyword top: The maximum number of resources to return from the collection. Default value is + None. + :paramtype top: int + :keyword skip: An offset into the collection of the first resource to be returned. Default + value is None. + :paramtype skip: int + :keyword source: Source of the QnA. Default value is None. + :paramtype source: str + :return: An iterator like instance of RetrieveQnaRecord + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.ai.language.questionanswering.authoring.models.RetrieveQnaRecord] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[list[_models.RetrieveQnaRecord]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_question_answering_authoring_list_qnas_request( + project_name=project_name, + top=top, + skip=skip, + maxpagesize=maxpagesize, + source=source, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's 
api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "Endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.RetrieveQnaRecord], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + async def _update_qnas_initial( + self, project_name: str, body: Union[list[_models.UpdateQnaRecord], IO[bytes]], **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + 
error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_update_qnas_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update_qnas( + self, + project_name: 
str, + body: list[_models.UpdateQnaRecord], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Required. + :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update_qnas( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_qnas( + self, project_name: str, body: Union[list[_models.UpdateQnaRecord], IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Update QnAs parameters of a project. Is either a [UpdateQnaRecord] type or a + IO[bytes] type. Required. 
+ :type body: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] or + IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_qnas_initial( + project_name=project_name, + body=body, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore 
+ + @distributed_trace_async + async def _get_update_qnas_status( + self, project_name: str, job_id: str, **kwargs: Any + ) -> _models.QuestionAnsweringAuthoringUpdateQnasJobState: + """Gets the status of update QnAs job. + + :param project_name: Name of the project. Required. + :type project_name: str + :param job_id: The job ID. Required. + :type job_id: str + :return: QuestionAnsweringAuthoringUpdateQnasJobState. The + QuestionAnsweringAuthoringUpdateQnasJobState is compatible with MutableMapping + :rtype: + ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringAuthoringUpdateQnasJobState + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.QuestionAnsweringAuthoringUpdateQnasJobState] = kwargs.pop("cls", None) + + _request = build_question_answering_authoring_get_update_qnas_status_request( + project_name=project_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.QuestionAnsweringAuthoringUpdateQnasJobState, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def add_feedback( + self, + project_name: str, + body: _models.ActiveLearningFeedback, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.ActiveLearningFeedback + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def add_feedback( + self, project_name: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def add_feedback( + self, project_name: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Update Active Learning feedback. 
+ + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def add_feedback( + self, project_name: str, body: Union[_models.ActiveLearningFeedback, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Update Active Learning feedback. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param body: Feedback for Active Learning. Is one of the following types: + ActiveLearningFeedback, JSON, IO[bytes] Required. + :type body: ~azure.ai.language.questionanswering.authoring.models.ActiveLearningFeedback or + JSON or IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_question_answering_authoring_add_feedback_request( + project_name=project_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py new file mode 100644 index 000000000000..c469c2262b85 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_operations/_patch.py @@ -0,0 +1,321 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long,useless-suppression,duplicate-code,arguments-renamed,missing-module-docstring,missing-class-docstring,missing-function-docstring +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from collections.abc import MutableMapping +from typing import Any, Union, overload, IO +from azure.core.polling import AsyncLROPoller +from azure.core.tracing.decorator_async import distributed_trace_async +from ... import models as _models +from ._operations import ( + _QuestionAnsweringAuthoringClientOperationsMixin as _QuestionAnsweringAuthoringClientOperationsMixinGenerated, +) + +JSON = MutableMapping[str, Any] + + +class _QuestionAnsweringAuthoringClientOperationsMixin(_QuestionAnsweringAuthoringClientOperationsMixinGenerated): + """Mixin class for patching methods with backward compatible parameter names.""" + + # create_project overloads with 'options' parameter + @overload # type: ignore + async def create_project( + self, + project_name: str, + options: _models.QuestionAnsweringProject, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Required. + :type options: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def create_project( + self, project_name: str, options: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. 
+ :type project_name: str + :param options: The resource instance. Required. + :type options: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def create_project( + self, project_name: str, options: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Required. + :type options: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: QuestionAnsweringProject. The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_project( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, options: Union[_models.QuestionAnsweringProject, JSON, IO[bytes]], **kwargs: Any + ) -> _models.QuestionAnsweringProject: + """Create or update a project. + + :param project_name: Name of the project. Required. + :type project_name: str + :param options: The resource instance. Is one of the following types: QuestionAnsweringProject, + JSON, IO[bytes] Required. + :type options: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject or + JSON or IO[bytes] + :return: QuestionAnsweringProject. 
The QuestionAnsweringProject is compatible with + MutableMapping + :rtype: ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return await super().create_project(project_name=project_name, body=options, **kwargs) + + # update_synonyms overloads with 'synonyms' parameter + @overload # type: ignore + async def update_synonyms( + self, + project_name: str, + synonyms: _models.SynonymAssets, + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def update_synonyms( + self, project_name: str, synonyms: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def update_synonyms( + self, project_name: str, synonyms: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Required. + :type synonyms: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_synonyms( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, synonyms: Union[_models.SynonymAssets, JSON, IO[bytes]], **kwargs: Any + ) -> None: + """Updates all the synonyms of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param synonyms: All the synonyms of a project. Is one of the following types: SynonymAssets, JSON, + IO[bytes] Required. + :type synonyms: ~azure.ai.language.questionanswering.authoring.models.SynonymAssets or JSON or + IO[bytes] + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return await super().update_synonyms(project_name=project_name, body=synonyms, **kwargs) + + # begin_update_qnas overloads with 'qnas' parameter + @overload # type: ignore + async def begin_update_qnas( + self, + project_name: str, + qnas: list[_models.UpdateQnaRecord], + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. 
Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def begin_update_qnas( + self, project_name: str, qnas: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Required. + :type qnas: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_qnas( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, qnas: Union[list[_models.UpdateQnaRecord], IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the QnAs of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param qnas: Update QnAs parameters of a project. Is either a [UpdateQnaRecord] type or a + IO[bytes] type. Required. 
+ :type qnas: list[~azure.ai.language.questionanswering.authoring.models.UpdateQnaRecord] or + IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return await super().begin_update_qnas(project_name=project_name, body=qnas, **kwargs) + + # begin_update_sources overloads with 'sources' parameter + @overload # type: ignore + async def begin_update_sources( + self, + project_name: str, + sources: list[_models.UpdateSourceRecord], + *, + content_type: str = "application/json", + **kwargs: Any, + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Required. + :type sources: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload # type: ignore + async def begin_update_sources( + self, project_name: str, sources: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Required. + :type sources: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update_sources( # pyright: ignore[reportIncompatibleMethodOverride] + self, project_name: str, sources: Union[list[_models.UpdateSourceRecord], IO[bytes]], **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the sources of a project. + + :param project_name: The name of the project to use. Required. + :type project_name: str + :param sources: Update sources parameters of a project. Is either a [UpdateSourceRecord] type or a + IO[bytes] type. Required. + :type sources: list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceRecord] or + IO[bytes] + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + # Call the parent implementation with 'body' parameter for backward compatibility + return await super().begin_update_sources(project_name=project_name, body=sources, **kwargs) + + +__all__: list[str] = [ + "_QuestionAnsweringAuthoringClientOperationsMixin" +] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/__init__.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/__init__.py new file mode 100644 index 000000000000..c8d505308054 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/__init__.py @@ -0,0 +1,108 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + ActiveLearningFeedback, + Assets, + Error, + ErrorResponse, + FeedbackRecord, + ImportFiles, + ImportJobOptions, + ImportQnaRecord, + InnerErrorModel, + ProjectDeployment, + ProjectSettings, + QnaDialog, + QnaPrompt, + QnaRecord, + QnaSourceRecord, + QuestionAnsweringAuthoringExportJobState, + QuestionAnsweringAuthoringImportJobState, + QuestionAnsweringAuthoringProjectDeletionJobState, + QuestionAnsweringAuthoringProjectDeploymentJobState, + QuestionAnsweringAuthoringUpdateQnasJobState, + QuestionAnsweringAuthoringUpdateSourcesJobState, + QuestionAnsweringProject, + RetrieveQnaRecord, + SuggestedQuestion, + SuggestedQuestionsCluster, + SynonymAssets, + UpdateQnaRecord, + UpdateQnaSourceRecord, + UpdateSourceFiles, + UpdateSourceFromFileOperationRecord, + UpdateSourceRecord, + WordAlterations, +) + +from ._enums import ( # type: ignore + AssetKind, + ErrorCode, + Format, + InnerErrorCode, + JobStatus, + SourceContentStructureKind, + SourceKind, + UpdateOperationKind, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "ActiveLearningFeedback", + "Assets", + "Error", + "ErrorResponse", + "FeedbackRecord", + "ImportFiles", + "ImportJobOptions", + "ImportQnaRecord", + "InnerErrorModel", + "ProjectDeployment", + "ProjectSettings", + "QnaDialog", + "QnaPrompt", + "QnaRecord", + "QnaSourceRecord", + "QuestionAnsweringAuthoringExportJobState", + "QuestionAnsweringAuthoringImportJobState", + "QuestionAnsweringAuthoringProjectDeletionJobState", + "QuestionAnsweringAuthoringProjectDeploymentJobState", + "QuestionAnsweringAuthoringUpdateQnasJobState", + "QuestionAnsweringAuthoringUpdateSourcesJobState", + 
"QuestionAnsweringProject", + "RetrieveQnaRecord", + "SuggestedQuestion", + "SuggestedQuestionsCluster", + "SynonymAssets", + "UpdateQnaRecord", + "UpdateQnaSourceRecord", + "UpdateSourceFiles", + "UpdateSourceFromFileOperationRecord", + "UpdateSourceRecord", + "WordAlterations", + "AssetKind", + "ErrorCode", + "Format", + "InnerErrorCode", + "JobStatus", + "SourceContentStructureKind", + "SourceKind", + "UpdateOperationKind", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_enums.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_enums.py new file mode 100644 index 000000000000..602a8d151f6b --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_enums.py @@ -0,0 +1,145 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# --------------------------------------------------------------------------

from enum import Enum
from azure.core import CaseInsensitiveEnumMeta


class AssetKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Kind of the asset of the project."""

    QNAS = "qnas"
    """Question and Answers."""
    SYNONYMS = "synonyms"
    """Synonyms."""


class ErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Human-readable error code."""

    INVALID_REQUEST = "InvalidRequest"
    """Invalid request error"""
    INVALID_ARGUMENT = "InvalidArgument"
    """Invalid argument error"""
    UNAUTHORIZED = "Unauthorized"
    """Unauthorized access error"""
    FORBIDDEN = "Forbidden"
    """Forbidden access error"""
    NOT_FOUND = "NotFound"
    """Not found error"""
    PROJECT_NOT_FOUND = "ProjectNotFound"
    """Project not found error"""
    OPERATION_NOT_FOUND = "OperationNotFound"
    """Operation not found error"""
    AZURE_COGNITIVE_SEARCH_NOT_FOUND = "AzureCognitiveSearchNotFound"
    """Azure Cognitive Search not found error"""
    AZURE_COGNITIVE_SEARCH_INDEX_NOT_FOUND = "AzureCognitiveSearchIndexNotFound"
    """Azure Cognitive Search index not found error"""
    TOO_MANY_REQUESTS = "TooManyRequests"
    """Too many requests error"""
    AZURE_COGNITIVE_SEARCH_THROTTLING = "AzureCognitiveSearchThrottling"
    """Azure Cognitive Search throttling error"""
    AZURE_COGNITIVE_SEARCH_INDEX_LIMIT_REACHED = "AzureCognitiveSearchIndexLimitReached"
    """Azure Cognitive Search index limit reached error"""
    INTERNAL_SERVER_ERROR = "InternalServerError"
    """Internal server error"""
    SERVICE_UNAVAILABLE = "ServiceUnavailable"
    """Service unavailable error"""
    TIMEOUT = "Timeout"
    """Timeout error"""
    QUOTA_EXCEEDED = "QuotaExceeded"
    """Quota exceeded error"""
    CONFLICT = "Conflict"
    """Conflict error"""
    WARNING = "Warning"
    """Warning error"""


class Format(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Knowledge base Import or Export format."""

    JSON = "json"
    """Export or Import QnA assets in JSON format."""
    # FIX: the generated docstrings for TSV and EXCEL were swapped (TSV was
    # described as "Excel format" and vice versa); each member now documents
    # its own format. Enum values are unchanged.
    TSV = "tsv"
    """Export or Import knowledge base replica including all assets and metadata in
    Tsv format."""
    EXCEL = "excel"
    """Export or Import knowledge base replica including all assets and metadata in
    Excel format."""


class InnerErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Human-readable error code."""

    INVALID_REQUEST = "InvalidRequest"
    """Invalid request error"""
    INVALID_PARAMETER_VALUE = "InvalidParameterValue"
    """Invalid parameter value error"""
    KNOWLEDGE_BASE_NOT_FOUND = "KnowledgeBaseNotFound"
    """Knowledge base not found error"""
    AZURE_COGNITIVE_SEARCH_NOT_FOUND = "AzureCognitiveSearchNotFound"
    """Azure Cognitive Search not found error"""
    AZURE_COGNITIVE_SEARCH_THROTTLING = "AzureCognitiveSearchThrottling"
    """Azure Cognitive Search throttling error"""
    EXTRACTION_FAILURE = "ExtractionFailure"
    """Extraction failure error"""
    INVALID_REQUEST_BODY_FORMAT = "InvalidRequestBodyFormat"
    """Invalid request body format error"""
    EMPTY_REQUEST = "EmptyRequest"
    """Empty request error"""
    MISSING_INPUT_DOCUMENTS = "MissingInputDocuments"
    """Missing input documents error"""
    INVALID_DOCUMENT = "InvalidDocument"
    """Invalid document error"""
    MODEL_VERSION_INCORRECT = "ModelVersionIncorrect"
    """Model version incorrect error"""
    INVALID_DOCUMENT_BATCH = "InvalidDocumentBatch"
    """Invalid document batch error"""
    UNSUPPORTED_LANGUAGE_CODE = "UnsupportedLanguageCode"
    """Unsupported language code error"""
    INVALID_COUNTRY_HINT = "InvalidCountryHint"
    """Invalid country hint error"""


class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Job Status."""

    NOT_STARTED = "notStarted"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    CANCELLED = "cancelled"
    CANCELLING = "cancelling"
    PARTIALLY_COMPLETED = "partiallyCompleted"


class SourceContentStructureKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Content structure type for sources."""

    UNSTRUCTURED = "unstructured"
    """Unstructured."""
class SourceKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Supported source types."""

    FILE = "file"
    """File Source."""
    URL = "url"
    """URI Source."""


class UpdateOperationKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Update operation type for assets."""

    ADD = "add"
    """Add operation."""
    DELETE = "delete"
    """Delete operation."""
    REPLACE = "replace"
    """Replace operation."""


# ---------------------------------------------------------------------------
# NOTE(review): everything above belongs to models/_enums.py; the content
# below is the start of the separate generated module
# azure/ai/language/questionanswering/authoring/models/_models.py.
# ---------------------------------------------------------------------------

# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=useless-super-delegation

import datetime
from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload

from azure.core.exceptions import ODataV4Format

from .._utils.model_base import Model as _Model, rest_field
from .._utils.utils import FileType

if TYPE_CHECKING:
    from .. import models as _models


class ActiveLearningFeedback(_Model):
    """Feedback for Active Learning.

    :ivar records: A list of Feedback Records for Active Learning.
    :vartype records: list[~azure.ai.language.questionanswering.authoring.models.FeedbackRecord]
    """

    records: Optional[list["_models.FeedbackRecord"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """A list of Feedback Records for Active Learning."""

    @overload
    def __init__(
        self,
        *,
        records: Optional[list["_models.FeedbackRecord"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Assets(_Model):
    """All assets for this project.

    :ivar synonyms: Collection of synonyms.
    :vartype synonyms: list[~azure.ai.language.questionanswering.authoring.models.WordAlterations]
    :ivar qnas: List of QnA records to import.
    :vartype qnas: list[~azure.ai.language.questionanswering.authoring.models.ImportQnaRecord]
    """

    synonyms: Optional[list["_models.WordAlterations"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Collection of synonyms."""
    qnas: Optional[list["_models.ImportQnaRecord"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """List of QnA records to import."""

    @overload
    def __init__(
        self,
        *,
        synonyms: Optional[list["_models.WordAlterations"]] = None,
        qnas: Optional[list["_models.ImportQnaRecord"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class Error(_Model):
    """The error object.

    :ivar code: One of a server-defined set of error codes. Required. Known values are:
     "InvalidRequest", "InvalidArgument", "Unauthorized", "Forbidden", "NotFound",
     "ProjectNotFound", "OperationNotFound", "AzureCognitiveSearchNotFound",
     "AzureCognitiveSearchIndexNotFound", "TooManyRequests", "AzureCognitiveSearchThrottling",
     "AzureCognitiveSearchIndexLimitReached", "InternalServerError", "ServiceUnavailable",
     "Timeout", "QuotaExceeded", "Conflict", and "Warning".
    :vartype code: str or ~azure.ai.language.questionanswering.authoring.models.ErrorCode
    :ivar message: A human-readable representation of the error. Required.
    :vartype message: str
    :ivar target: The target of the error.
    :vartype target: str
    :ivar details: An array of details about specific errors that led to this reported error.
    :vartype details: list[~azure.ai.language.questionanswering.authoring.models.Error]
    :ivar innererror: An object containing more specific information than the current object about
     the error.
    :vartype innererror: ~azure.ai.language.questionanswering.authoring.models.InnerErrorModel
    """

    code: Union[str, "_models.ErrorCode"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """One of a server-defined set of error codes. Required. Known values are: \"InvalidRequest\",
     \"InvalidArgument\", \"Unauthorized\", \"Forbidden\", \"NotFound\", \"ProjectNotFound\",
     \"OperationNotFound\", \"AzureCognitiveSearchNotFound\", \"AzureCognitiveSearchIndexNotFound\",
     \"TooManyRequests\", \"AzureCognitiveSearchThrottling\",
     \"AzureCognitiveSearchIndexLimitReached\", \"InternalServerError\", \"ServiceUnavailable\",
     \"Timeout\", \"QuotaExceeded\", \"Conflict\", and \"Warning\"."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """A human-readable representation of the error. Required."""
    target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The target of the error."""
    # details is recursive: each entry is itself an Error.
    details: Optional[list["_models.Error"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """An array of details about specific errors that led to this reported error."""
    innererror: Optional["_models.InnerErrorModel"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An object containing more specific information than the current object about
    the error."""

    @overload
    def __init__(
        self,
        *,
        code: Union[str, "_models.ErrorCode"],
        message: str,
        target: Optional[str] = None,
        details: Optional[list["_models.Error"]] = None,
        innererror: Optional["_models.InnerErrorModel"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ErrorResponse(_Model):
    """Error response.

    :ivar error: The error object. Required.
    :vartype error: ~azure.ai.language.questionanswering.authoring.models.Error
    """

    error: "_models.Error" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The error object. Required."""

    @overload
    def __init__(
        self,
        *,
        error: "_models.Error",
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class FeedbackRecord(_Model):
    """Feedback Record for Active Learning.

    :ivar user_id: Unique identifier of the user.
    :vartype user_id: str
    :ivar user_question: User suggested question for the QnA.
    :vartype user_question: str
    :ivar qna_id: Unique ID of the QnA.
    :vartype qna_id: int
    """

    # Python snake_case attributes map to the service's camelCase JSON keys
    # via the rest_field(name=...) argument.
    user_id: Optional[str] = rest_field(name="userId", visibility=["read", "create", "update", "delete", "query"])
    """Unique identifier of the user."""
    user_question: Optional[str] = rest_field(
        name="userQuestion", visibility=["read", "create", "update", "delete", "query"]
    )
    """User suggested question for the QnA."""
    qna_id: Optional[int] = rest_field(name="qnaId", visibility=["read", "create", "update", "delete", "query"])
    """Unique ID of the QnA."""

    @overload
    def __init__(
        self,
        *,
        user_id: Optional[str] = None,
        user_question: Optional[str] = None,
        qna_id: Optional[int] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class ImportFiles(_Model):
    """Collection of files containing project assets that need to be imported.

    :ivar files: Collection of files where the fileName is required. Required.
    :vartype files: list[~azure.ai.language.questionanswering.authoring._utils.utils.FileType]
    """

    # is_multipart_file_input marks this field for multipart/form-data upload
    # handling rather than JSON serialization.
    files: list[FileType] = rest_field(
        visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True
    )
    """Collection of files where the fileName is required. Required."""

    @overload
    def __init__(
        self,
        *,
        files: list[FileType],
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ImportJobOptions(_Model):
    """Project assets that need to be imported.

    :ivar metadata: Parameters needed to create the project.
    :vartype metadata:
     ~azure.ai.language.questionanswering.authoring.models.QuestionAnsweringProject
    :ivar assets: All assets for this project.
    :vartype assets: ~azure.ai.language.questionanswering.authoring.models.Assets
    :ivar file_uri: Import data File URI.
    :vartype file_uri: str
    """

    metadata: Optional["_models.QuestionAnsweringProject"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Parameters needed to create the project."""
    assets: Optional["_models.Assets"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """All assets for this project."""
    file_uri: Optional[str] = rest_field(name="fileUri", visibility=["read", "create", "update", "delete", "query"])
    """Import data File URI."""

    @overload
    def __init__(
        self,
        *,
        metadata: Optional["_models.QuestionAnsweringProject"] = None,
        assets: Optional["_models.Assets"] = None,
        file_uri: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ImportQnaRecord(_Model):
    """Import QnA Record.

    :ivar id: Unique ID for the QnA. Required.
    :vartype id: int
    :ivar answer: Answer text.
    :vartype answer: str
    :ivar source: Source from which QnA was indexed e.g.
     `https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
     <https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs>`_ .
    :vartype source: str
    :ivar questions: List of questions associated with the answer.
    :vartype questions: list[str]
    :ivar metadata: Metadata associated with the answer, useful to categorize or filter question
     answers.
    :vartype metadata: dict[str, str]
    :ivar dialog: Context of a QnA.
    :vartype dialog: ~azure.ai.language.questionanswering.authoring.models.QnaDialog
    :ivar active_learning_suggestions: List of Active Learning suggestions for the QnA.
    :vartype active_learning_suggestions:
     list[~azure.ai.language.questionanswering.authoring.models.SuggestedQuestionsCluster]
    :ivar last_updated_date_time: Date-time when the QnA was last updated.
    :vartype last_updated_date_time: ~datetime.datetime
    :ivar source_display_name: Friendly name of the Source.
    :vartype source_display_name: str
    """

    # id is settable only at creation time; it cannot be updated afterwards.
    id: int = rest_field(visibility=["read", "create"])
    """Unique ID for the QnA. Required."""
    answer: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Answer text."""
    source: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Source from which QnA was indexed e.g.
    `https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
    <https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs>`_ ."""
    questions: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """List of questions associated with the answer."""
    metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Metadata associated with the answer, useful to categorize or filter question
    answers."""
    dialog: Optional["_models.QnaDialog"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Context of a QnA."""
    active_learning_suggestions: Optional[list["_models.SuggestedQuestionsCluster"]] = rest_field(
        name="activeLearningSuggestions", visibility=["read", "create", "update", "delete", "query"]
    )
    """List of Active Learning suggestions for the QnA."""
    last_updated_date_time: Optional[datetime.datetime] = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """Date-time when the QnA was last updated."""
    source_display_name: Optional[str] = rest_field(
        name="sourceDisplayName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Friendly name of the Source."""

    @overload
    def __init__(
        self,
        *,
        id: int,  # pylint: disable=redefined-builtin
        answer: Optional[str] = None,
        source: Optional[str] = None,
        questions: Optional[list[str]] = None,
        metadata: Optional[dict[str, str]] = None,
        dialog: Optional["_models.QnaDialog"] = None,
        active_learning_suggestions: Optional[list["_models.SuggestedQuestionsCluster"]] = None,
        last_updated_date_time: Optional[datetime.datetime] = None,
        source_display_name: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class InnerErrorModel(_Model):
    """An object containing more specific information about the error. As per
    Microsoft One API guidelines -
    `https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses
    <https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses>`_.

    :ivar code: One of a server-defined set of error codes. Required. Known values are:
     "InvalidRequest", "InvalidParameterValue", "KnowledgeBaseNotFound",
     "AzureCognitiveSearchNotFound", "AzureCognitiveSearchThrottling", "ExtractionFailure",
     "InvalidRequestBodyFormat", "EmptyRequest", "MissingInputDocuments", "InvalidDocument",
     "ModelVersionIncorrect", "InvalidDocumentBatch", "UnsupportedLanguageCode", and
     "InvalidCountryHint".
    :vartype code: str or ~azure.ai.language.questionanswering.authoring.models.InnerErrorCode
    :ivar message: Error message. Required.
    :vartype message: str
    :ivar details: Error details.
    :vartype details: dict[str, str]
    :ivar target: Error target.
    :vartype target: str
    :ivar innererror: An object containing more specific information than the current object about
     the error.
    :vartype innererror: ~azure.ai.language.questionanswering.authoring.models.InnerErrorModel
    """

    code: Union[str, "_models.InnerErrorCode"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """One of a server-defined set of error codes. Required. Known values are: \"InvalidRequest\",
     \"InvalidParameterValue\", \"KnowledgeBaseNotFound\", \"AzureCognitiveSearchNotFound\",
     \"AzureCognitiveSearchThrottling\", \"ExtractionFailure\", \"InvalidRequestBodyFormat\",
     \"EmptyRequest\", \"MissingInputDocuments\", \"InvalidDocument\", \"ModelVersionIncorrect\",
     \"InvalidDocumentBatch\", \"UnsupportedLanguageCode\", and \"InvalidCountryHint\"."""
    message: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Error message. Required."""
    details: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Error details."""
    target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Error target."""
    # Nested inner errors form a chain of increasingly specific causes.
    innererror: Optional["_models.InnerErrorModel"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """An object containing more specific information than the current object about
    the error."""

    @overload
    def __init__(
        self,
        *,
        code: Union[str, "_models.InnerErrorCode"],
        message: str,
        details: Optional[dict[str, str]] = None,
        target: Optional[str] = None,
        innererror: Optional["_models.InnerErrorModel"] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class ProjectDeployment(_Model):
    """Project deployment details.

    :ivar deployment_name: Name of the deployment. Required.
    :vartype deployment_name: str
    :ivar last_deployed_date_time: Represents the project last deployment date-time.
    :vartype last_deployed_date_time: ~datetime.datetime
    """

    # deployment_name is service-assigned (read-only visibility), which is why
    # it does not appear in the keyword-argument __init__ overload below.
    deployment_name: str = rest_field(name="deploymentName", visibility=["read"])
    """Name of the deployment. Required."""
    last_deployed_date_time: Optional[datetime.datetime] = rest_field(
        name="lastDeployedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """Represents the project last deployment date-time."""

    @overload
    def __init__(
        self,
        *,
        last_deployed_date_time: Optional[datetime.datetime] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class ProjectSettings(_Model):
    """Configurable settings of the Project.

    :ivar default_answer: Default Answer response when no good match is found in the knowledge
     base.
    :vartype default_answer: str
    """

    default_answer: Optional[str] = rest_field(
        name="defaultAnswer", visibility=["read", "create", "update", "delete", "query"]
    )
    """Default Answer response when no good match is found in the knowledge base."""

    @overload
    def __init__(
        self,
        *,
        default_answer: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class QnaDialog(_Model):
    """Dialog associated with QnA Record.

    :ivar is_context_only: To mark if a prompt is relevant only with a previous question or not.
     If true,
     do not include this QnA as answer for queries without context; otherwise,
     ignores context and includes this QnA in answers.
    :vartype is_context_only: bool
    :ivar prompts: List of prompts associated with the answer.
    :vartype prompts: list[~azure.ai.language.questionanswering.authoring.models.QnaPrompt]
    """

    is_context_only: Optional[bool] = rest_field(
        name="isContextOnly", visibility=["read", "create", "update", "delete", "query"]
    )
    """To mark if a prompt is relevant only with a previous question or not. If true,
    do not include this QnA as answer for queries without context; otherwise,
    ignores context and includes this QnA in answers."""
    prompts: Optional[list["_models.QnaPrompt"]] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """List of prompts associated with the answer."""

    @overload
    def __init__(
        self,
        *,
        is_context_only: Optional[bool] = None,
        prompts: Optional[list["_models.QnaPrompt"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class QnaPrompt(_Model):
    """Prompt for an answer.

    :ivar display_order: Index of the prompt. It is used for ordering of the prompts.
    :vartype display_order: int
    :ivar qna_id: ID of the QnA corresponding to the prompt.
    :vartype qna_id: int
    :ivar qna: QnA record. Either QnAId or QnA record needs to be present in a Prompt.
    :vartype qna: ~azure.ai.language.questionanswering.authoring.models.QnaRecord
    :ivar display_text: Text displayed to represent a follow up question prompt.
    :vartype display_text: str
    """

    display_order: Optional[int] = rest_field(
        name="displayOrder", visibility=["read", "create", "update", "delete", "query"]
    )
    """Index of the prompt. It is used for ordering of the prompts."""
    qna_id: Optional[int] = rest_field(name="qnaId", visibility=["read", "create", "update", "delete", "query"])
    """ID of the QnA corresponding to the prompt."""
    qna: Optional["_models.QnaRecord"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """QnA record. Either QnAId or QnA record needs to be present in a Prompt."""
    display_text: Optional[str] = rest_field(
        name="displayText", visibility=["read", "create", "update", "delete", "query"]
    )
    """Text displayed to represent a follow up question prompt."""

    @overload
    def __init__(
        self,
        *,
        display_order: Optional[int] = None,
        qna_id: Optional[int] = None,
        qna: Optional["_models.QnaRecord"] = None,
        display_text: Optional[str] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class QnaRecord(_Model):
    """QnA record.

    :ivar id: Unique ID for the QnA. Required.
    :vartype id: int
    :ivar answer: Answer text.
    :vartype answer: str
    :ivar source: Source from which QnA was indexed e.g.
     `https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
     <https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs>`_ .
    :vartype source: str
    :ivar questions: List of questions associated with the answer.
    :vartype questions: list[str]
    :ivar metadata: Metadata associated with the answer, useful to categorize or filter question
     answers.
    :vartype metadata: dict[str, str]
    :ivar dialog: Context of a QnA.
    :vartype dialog: ~azure.ai.language.questionanswering.authoring.models.QnaDialog
    :ivar active_learning_suggestions: List of Active Learning suggestions for the QnA.
    :vartype active_learning_suggestions:
     list[~azure.ai.language.questionanswering.authoring.models.SuggestedQuestionsCluster]
    """

    # id is settable only at creation time; it cannot be updated afterwards.
    id: int = rest_field(visibility=["read", "create"])
    """Unique ID for the QnA. Required."""
    answer: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Answer text."""
    source: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Source from which QnA was indexed e.g.
    `https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs
    <https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/FAQs>`_ ."""
    questions: Optional[list[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """List of questions associated with the answer."""
    metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Metadata associated with the answer, useful to categorize or filter question
    answers."""
    dialog: Optional["_models.QnaDialog"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Context of a QnA."""
    active_learning_suggestions: Optional[list["_models.SuggestedQuestionsCluster"]] = rest_field(
        name="activeLearningSuggestions", visibility=["read", "create", "update", "delete", "query"]
    )
    """List of Active Learning suggestions for the QnA."""

    @overload
    def __init__(
        self,
        *,
        id: int,  # pylint: disable=redefined-builtin
        answer: Optional[str] = None,
        source: Optional[str] = None,
        questions: Optional[list[str]] = None,
        metadata: Optional[dict[str, str]] = None,
        dialog: Optional["_models.QnaDialog"] = None,
        active_learning_suggestions: Optional[list["_models.SuggestedQuestionsCluster"]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class QnaSourceRecord(_Model):
    """Custom source record with last updated date-time.

    :ivar display_name: Friendly name of the Source.
    :vartype display_name: str
    :ivar source: Unique source identifier. Name of the file if it's a 'file' source; otherwise,
     the complete URL if it's a 'url' source. Required.
    :vartype source: str
    :ivar source_uri: URI location for the file or url. Required.
    :vartype source_uri: str
    :ivar source_kind: Supported source types. Required. Known values are: "file" and "url".
    :vartype source_kind: str or ~azure.ai.language.questionanswering.authoring.models.SourceKind
    :ivar content_structure_kind: Content structure type for sources. "unstructured"
    :vartype content_structure_kind: str or
     ~azure.ai.language.questionanswering.authoring.models.SourceContentStructureKind
    :ivar last_updated_date_time: Date-time when the QnA was last updated.
    :vartype last_updated_date_time: ~datetime.datetime
    """

    display_name: Optional[str] = rest_field(
        name="displayName", visibility=["read", "create", "update", "delete", "query"]
    )
    """Friendly name of the Source."""
    # NOTE: source has narrower visibility (no delete/query) than the other
    # fields in this model, per the generated service definition.
    source: str = rest_field(visibility=["read", "create", "update"])
    """Unique source identifier. Name of the file if it's a 'file' source; otherwise,
    the complete URL if it's a 'url' source. Required."""
    source_uri: str = rest_field(name="sourceUri", visibility=["read", "create", "update", "delete", "query"])
    """URI location for the file or url. Required."""
    source_kind: Union[str, "_models.SourceKind"] = rest_field(
        name="sourceKind", visibility=["read", "create", "update", "delete", "query"]
    )
    """Supported source types. Required. Known values are: \"file\" and \"url\"."""
    content_structure_kind: Optional[Union[str, "_models.SourceContentStructureKind"]] = rest_field(
        name="contentStructureKind", visibility=["read", "create", "update", "delete", "query"]
    )
    """Content structure type for sources. \"unstructured\""""
    last_updated_date_time: Optional[datetime.datetime] = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """Date-time when the QnA was last updated."""

    @overload
    def __init__(
        self,
        *,
        source: str,
        source_uri: str,
        source_kind: Union[str, "_models.SourceKind"],
        display_name: Optional[str] = None,
        content_structure_kind: Optional[Union[str, "_models.SourceContentStructureKind"]] = None,
        last_updated_date_time: Optional[datetime.datetime] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class QuestionAnsweringAuthoringExportJobState(_Model):
    """Export job status, project metadata, and assets.

    :ivar created_date_time: The creation date time of the job. Required.
    :vartype created_date_time: ~datetime.datetime
    :ivar expiration_date_time: The expiration date time of the job.
    :vartype expiration_date_time: ~datetime.datetime
    :ivar job_id: The job ID. Required.
    :vartype job_id: str
    :ivar last_updated_date_time: The last date time the job was updated. Required.
    :vartype last_updated_date_time: ~datetime.datetime
    :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded",
     "failed", "cancelled", "cancelling", and "partiallyCompleted".
    :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus
    :ivar errors: The errors encountered while executing the job.
    :vartype errors: list[~azure.core.ODataV4Format]
    :ivar result_url: URL to download the result of the Export Job. Required.
    :vartype result_url: str
    """

    created_date_time: datetime.datetime = rest_field(
        name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The creation date time of the job. Required."""
    expiration_date_time: Optional[datetime.datetime] = rest_field(
        name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The expiration date time of the job."""
    # job_id is service-assigned (read-only visibility), so it is absent from
    # the keyword-argument __init__ overload below.
    job_id: str = rest_field(name="jobId", visibility=["read"])
    """The job ID. Required."""
    last_updated_date_time: datetime.datetime = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The last date time the job was updated. Required."""
    status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Job Status. Required. Known values are: \"notStarted\", \"running\", \"succeeded\", \"failed\",
     \"cancelled\", \"cancelling\", and \"partiallyCompleted\"."""
    errors: Optional[list[ODataV4Format]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The errors encountered while executing the job."""
    result_url: str = rest_field(name="resultUrl", visibility=["read", "create", "update", "delete", "query"])
    """URL to download the result of the Export Job. Required."""

    @overload
    def __init__(
        self,
        *,
        created_date_time: datetime.datetime,
        last_updated_date_time: datetime.datetime,
        status: Union[str, "_models.JobStatus"],
        result_url: str,
        expiration_date_time: Optional[datetime.datetime] = None,
        errors: Optional[list[ODataV4Format]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)


class QuestionAnsweringAuthoringImportJobState(_Model):
    """Import job status, project metadata, and assets.

    :ivar created_date_time: The creation date time of the job. Required.
    :vartype created_date_time: ~datetime.datetime
    :ivar expiration_date_time: The expiration date time of the job.
    :vartype expiration_date_time: ~datetime.datetime
    :ivar job_id: The job ID. Required.
    :vartype job_id: str
    :ivar last_updated_date_time: The last date time the job was updated. Required.
    :vartype last_updated_date_time: ~datetime.datetime
    :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded",
     "failed", "cancelled", "cancelling", and "partiallyCompleted".
    :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus
    :ivar errors: The errors encountered while executing the job.
    :vartype errors: list[~azure.core.ODataV4Format]
    """

    created_date_time: datetime.datetime = rest_field(
        name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The creation date time of the job. Required."""
    expiration_date_time: Optional[datetime.datetime] = rest_field(
        name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The expiration date time of the job."""
    # job_id is service-assigned (read-only visibility), so it is absent from
    # the keyword-argument __init__ overload below.
    job_id: str = rest_field(name="jobId", visibility=["read"])
    """The job ID. Required."""
    last_updated_date_time: datetime.datetime = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The last date time the job was updated. Required."""
    status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Job Status. Required. Known values are: \"notStarted\", \"running\", \"succeeded\", \"failed\",
     \"cancelled\", \"cancelling\", and \"partiallyCompleted\"."""
    errors: Optional[list[ODataV4Format]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The errors encountered while executing the job."""

    @overload
    def __init__(
        self,
        *,
        created_date_time: datetime.datetime,
        last_updated_date_time: datetime.datetime,
        status: Union[str, "_models.JobStatus"],
        expiration_date_time: Optional[datetime.datetime] = None,
        errors: Optional[list[ODataV4Format]] = None,
    ) -> None: ...

    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class QuestionAnsweringAuthoringProjectDeletionJobState(_Model):  # pylint: disable=name-too-long
    """Represents the state of a project deletion job.

    :ivar created_date_time: The creation date time of the job. Required.
    :vartype created_date_time: ~datetime.datetime
    :ivar expiration_date_time: The expiration date time of the job.
    :vartype expiration_date_time: ~datetime.datetime
    :ivar job_id: The job ID. Required.
    :vartype job_id: str
    :ivar last_updated_date_time: The last date time the job was updated. Required.
    :vartype last_updated_date_time: ~datetime.datetime
    :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded",
     "failed", "cancelled", "cancelling", and "partiallyCompleted".
    :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus
    :ivar errors: The errors encountered while executing the job.
    :vartype errors: list[~azure.core.ODataV4Format]
    """

    # Date-times use the RFC 3339 wire format; `name=` maps to the camelCase JSON property.
    created_date_time: datetime.datetime = rest_field(
        name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The creation date time of the job. Required."""
    expiration_date_time: Optional[datetime.datetime] = rest_field(
        name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The expiration date time of the job."""
    # Read-only: service-assigned, so it is excluded from the keyword overload below.
    job_id: str = rest_field(name="jobId", visibility=["read"])
    """The job ID. Required."""
    last_updated_date_time: datetime.datetime = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The last date time the job was updated. Required."""
    status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Job Status. Required. Known values are: \"notStarted\", \"running\", \"succeeded\", \"failed\",
    \"cancelled\", \"cancelling\", and \"partiallyCompleted\"."""
    errors: Optional[list[ODataV4Format]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The errors encountered while executing the job."""

    # Keyword-only constructor covering the settable fields.
    @overload
    def __init__(
        self,
        *,
        created_date_time: datetime.datetime,
        last_updated_date_time: datetime.datetime,
        status: Union[str, "_models.JobStatus"],
        expiration_date_time: Optional[datetime.datetime] = None,
        errors: Optional[list[ODataV4Format]] = None,
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+ + :ivar created_date_time: The creation date time of the job. Required. + :vartype created_date_time: ~datetime.datetime + :ivar expiration_date_time: The expiration date time of the job. + :vartype expiration_date_time: ~datetime.datetime + :ivar job_id: The job ID. Required. + :vartype job_id: str + :ivar last_updated_date_time: The last date time the job was updated. Required. + :vartype last_updated_date_time: ~datetime.datetime + :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded", + "failed", "cancelled", "cancelling", and "partiallyCompleted". + :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus + :ivar errors: The errors encountered while executing the job. + :vartype errors: list[~azure.core.ODataV4Format] + """ + + created_date_time: datetime.datetime = rest_field( + name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The creation date time of the job. Required.""" + expiration_date_time: Optional[datetime.datetime] = rest_field( + name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The expiration date time of the job.""" + job_id: str = rest_field(name="jobId", visibility=["read"]) + """The job ID. Required.""" + last_updated_date_time: datetime.datetime = rest_field( + name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The last date time the job was updated. Required.""" + status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Job Status. Required. 
class QuestionAnsweringAuthoringUpdateQnasJobState(_Model):  # pylint: disable=name-too-long
    """Job state represents the job metadata and any errors.

    :ivar created_date_time: The creation date time of the job. Required.
    :vartype created_date_time: ~datetime.datetime
    :ivar expiration_date_time: The expiration date time of the job.
    :vartype expiration_date_time: ~datetime.datetime
    :ivar job_id: The job ID. Required.
    :vartype job_id: str
    :ivar last_updated_date_time: The last date time the job was updated. Required.
    :vartype last_updated_date_time: ~datetime.datetime
    :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded",
     "failed", "cancelled", "cancelling", and "partiallyCompleted".
    :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus
    :ivar errors: The errors encountered while executing the job.
    :vartype errors: list[~azure.core.ODataV4Format]
    """

    # Date-times use the RFC 3339 wire format; `name=` maps to the camelCase JSON property.
    created_date_time: datetime.datetime = rest_field(
        name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The creation date time of the job. Required."""
    expiration_date_time: Optional[datetime.datetime] = rest_field(
        name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The expiration date time of the job."""
    # Read-only: service-assigned, so it is excluded from the keyword overload below.
    job_id: str = rest_field(name="jobId", visibility=["read"])
    """The job ID. Required."""
    last_updated_date_time: datetime.datetime = rest_field(
        name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339"
    )
    """The last date time the job was updated. Required."""
    status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Job Status. Required. Known values are: \"notStarted\", \"running\", \"succeeded\", \"failed\",
    \"cancelled\", \"cancelling\", and \"partiallyCompleted\"."""
    errors: Optional[list[ODataV4Format]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The errors encountered while executing the job."""

    # Keyword-only constructor covering the settable fields.
    @overload
    def __init__(
        self,
        *,
        created_date_time: datetime.datetime,
        last_updated_date_time: datetime.datetime,
        status: Union[str, "_models.JobStatus"],
        expiration_date_time: Optional[datetime.datetime] = None,
        errors: Optional[list[ODataV4Format]] = None,
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+ + :ivar created_date_time: The creation date time of the job. Required. + :vartype created_date_time: ~datetime.datetime + :ivar expiration_date_time: The expiration date time of the job. + :vartype expiration_date_time: ~datetime.datetime + :ivar job_id: The job ID. Required. + :vartype job_id: str + :ivar last_updated_date_time: The last date time the job was updated. Required. + :vartype last_updated_date_time: ~datetime.datetime + :ivar status: Job Status. Required. Known values are: "notStarted", "running", "succeeded", + "failed", "cancelled", "cancelling", and "partiallyCompleted". + :vartype status: str or ~azure.ai.language.questionanswering.authoring.models.JobStatus + :ivar errors: The errors encountered while executing the job. + :vartype errors: list[~azure.core.ODataV4Format] + """ + + created_date_time: datetime.datetime = rest_field( + name="createdDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The creation date time of the job. Required.""" + expiration_date_time: Optional[datetime.datetime] = rest_field( + name="expirationDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The expiration date time of the job.""" + job_id: str = rest_field(name="jobId", visibility=["read"]) + """The job ID. Required.""" + last_updated_date_time: datetime.datetime = rest_field( + name="lastUpdatedDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The last date time the job was updated. Required.""" + status: Union[str, "_models.JobStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Job Status. Required. 
class QuestionAnsweringProject(_Model):
    """Represents the project.

    :ivar project_name: Name of the project. Required.
    :vartype project_name: str
    :ivar description: Description of the project.
    :vartype description: str
    :ivar language: Language of the text records. This is BCP-47 representation of a language. For
     example, use "en" for English; "es" for Spanish etc. If not set, use "en" for
     English as default.
    :vartype language: str
    :ivar multilingual_resource: Resource enabled for multiple languages across projects or not.
    :vartype multilingual_resource: bool
    :ivar settings: Configurable settings of the Project.
    :vartype settings: ~azure.ai.language.questionanswering.authoring.models.ProjectSettings
    :ivar created_date_time: Project creation date-time.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_modified_date_time: Represents the project last modified date-time.
    :vartype last_modified_date_time: ~datetime.datetime
    :ivar last_deployed_date_time: Represents the project last deployment date-time.
    :vartype last_deployed_date_time: ~datetime.datetime
    :ivar configure_semantic_ranking: Represents if semantic ranking is configured.
    :vartype configure_semantic_ranking: bool
    """

    # Read-only (visibility=["read"]): populated by the service, never sent by the
    # client — presumably taken from the request URL; confirm against the operations.
    project_name: str = rest_field(name="projectName", visibility=["read"])
    """Name of the project. Required."""
    description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Description of the project."""
    language: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Language of the text records. This is BCP-47 representation of a language. For
    example, use \"en\" for English; \"es\" for Spanish etc. If not set, use \"en\" for
    English as default."""
    multilingual_resource: Optional[bool] = rest_field(
        name="multilingualResource", visibility=["read", "create", "update", "delete", "query"]
    )
    """Resource enabled for multiple languages across projects or not."""
    settings: Optional["_models.ProjectSettings"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Configurable settings of the Project."""
    # The three timestamps below are read-only and RFC 3339 formatted on the wire.
    created_date_time: Optional[datetime.datetime] = rest_field(
        name="createdDateTime", visibility=["read"], format="rfc3339"
    )
    """Project creation date-time."""
    last_modified_date_time: Optional[datetime.datetime] = rest_field(
        name="lastModifiedDateTime", visibility=["read"], format="rfc3339"
    )
    """Represents the project last modified date-time."""
    last_deployed_date_time: Optional[datetime.datetime] = rest_field(
        name="lastDeployedDateTime", visibility=["read"], format="rfc3339"
    )
    """Represents the project last deployment date-time."""
    configure_semantic_ranking: Optional[bool] = rest_field(
        name="configureSemanticRanking", visibility=["read", "create", "update", "delete", "query"]
    )
    """Represents if semantic ranking is configured."""

    # Keyword-only constructor: only the client-settable fields; the read-only
    # name and timestamps are deliberately omitted.
    @overload
    def __init__(
        self,
        *,
        description: Optional[str] = None,
        language: Optional[str] = None,
        multilingual_resource: Optional[bool] = None,
        settings: Optional["_models.ProjectSettings"] = None,
        configure_semantic_ranking: Optional[bool] = None,
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class SuggestedQuestion(_Model):
    """Question suggested by the Active Learning feature.

    :ivar question: Question suggested by the Active Learning feature.
    :vartype question: str
    :ivar user_suggested_count: The number of times the question was suggested explicitly by the
     user.
    :vartype user_suggested_count: int
    :ivar auto_suggested_count: The number of times the question was suggested automatically by the
     Active
     Learning algorithm.
    :vartype auto_suggested_count: int
    """

    question: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Question suggested by the Active Learning feature."""
    # `name=` maps the snake_case attribute to the camelCase JSON property.
    user_suggested_count: Optional[int] = rest_field(
        name="userSuggestedCount", visibility=["read", "create", "update", "delete", "query"]
    )
    """The number of times the question was suggested explicitly by the user."""
    auto_suggested_count: Optional[int] = rest_field(
        name="autoSuggestedCount", visibility=["read", "create", "update", "delete", "query"]
    )
    """The number of times the question was suggested automatically by the Active
    Learning algorithm."""

    # Keyword-only constructor: every field is optional.
    @overload
    def __init__(
        self,
        *,
        question: Optional[str] = None,
        user_suggested_count: Optional[int] = None,
        auto_suggested_count: Optional[int] = None,
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class SynonymAssets(_Model):
    """Request payload for updating synonyms.

    :ivar value: The WordAlterations items on this page. Required.
    :vartype value: list[~azure.ai.language.questionanswering.authoring.models.WordAlterations]
    :ivar next_link: The link to the next page of items.
    :vartype next_link: str
    """

    # NOTE(review): despite the "request payload" summary, this model carries the
    # value/nextLink paged-collection shape, so it also appears in paged responses
    # — confirm against the service operations.
    value: list["_models.WordAlterations"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """The WordAlterations items on this page. Required."""
    next_link: Optional[str] = rest_field(name="nextLink", visibility=["read", "create", "update", "delete", "query"])
    """The link to the next page of items."""

    # Keyword-only constructor covering the settable fields.
    @overload
    def __init__(
        self,
        *,
        value: list["_models.WordAlterations"],
        next_link: Optional[str] = None,
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
"unstructured" + :vartype content_structure_kind: str or + ~azure.ai.language.questionanswering.authoring.models.SourceContentStructureKind + :ivar refresh: Boolean flag used to refresh data from the Source. + :vartype refresh: bool + """ + + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Friendly name of the Source.""" + source: str = rest_field(visibility=["read", "create", "update"]) + """Unique source identifier. Name of the file if it's a 'file' source; otherwise, + the complete URL if it's a 'url' source. Required.""" + source_uri: str = rest_field(name="sourceUri", visibility=["read", "create", "update", "delete", "query"]) + """URI location for the file or url. Required.""" + source_kind: Union[str, "_models.SourceKind"] = rest_field( + name="sourceKind", visibility=["read", "create", "update", "delete", "query"] + ) + """Supported source types. Required. Known values are: \"file\" and \"url\".""" + content_structure_kind: Optional[Union[str, "_models.SourceContentStructureKind"]] = rest_field( + name="contentStructureKind", visibility=["read", "create", "update", "delete", "query"] + ) + """Content structure type for sources. \"unstructured\"""" + refresh: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Boolean flag used to refresh data from the Source.""" + + @overload + def __init__( + self, + *, + source: str, + source_uri: str, + source_kind: Union[str, "_models.SourceKind"], + display_name: Optional[str] = None, + content_structure_kind: Optional[Union[str, "_models.SourceContentStructureKind"]] = None, + refresh: Optional[bool] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
class UpdateSourceFiles(_Model):
    """Collection of files containing project assets that need to be imported.

    :ivar files: Collection of files where the fileName is required. Required.
    :vartype files: list[~azure.ai.language.questionanswering.authoring._utils.utils.FileType]
    :ivar file_operations: Collection of file operations to perform during update of sources.
     Required.
    :vartype file_operations:
     list[~azure.ai.language.questionanswering.authoring.models.UpdateSourceFromFileOperationRecord]
    """

    # is_multipart_file_input=True: these entries are sent as file parts of a
    # multipart/form-data request rather than serialized into the JSON body.
    files: list[FileType] = rest_field(
        visibility=["read", "create", "update", "delete", "query"], is_multipart_file_input=True
    )
    """Collection of files where the fileName is required. Required."""
    # Each operation references one of the uploaded files by its fileName.
    file_operations: list["_models.UpdateSourceFromFileOperationRecord"] = rest_field(
        name="fileOperations", visibility=["read", "create", "update", "delete", "query"]
    )
    """Collection of file operations to perform during update of sources. Required."""

    # Keyword-only constructor: both collections are required.
    @overload
    def __init__(
        self,
        *,
        files: list[FileType],
        file_operations: list["_models.UpdateSourceFromFileOperationRecord"],
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
class UpdateSourceRecord(_Model):
    """Source to be updated.

    :ivar op: Update operation type for assets. Required. Known values are: "add", "delete", and
     "replace".
    :vartype op: str or ~azure.ai.language.questionanswering.authoring.models.UpdateOperationKind
    :ivar value: Update source record. Required.
    :vartype value: ~azure.ai.language.questionanswering.authoring.models.UpdateQnaSourceRecord
    """

    # JSON-Patch-style pair: `op` selects the operation, `value` carries the source payload.
    op: Union[str, "_models.UpdateOperationKind"] = rest_field(
        visibility=["read", "create", "update", "delete", "query"]
    )
    """Update operation type for assets. Required. Known values are: \"add\", \"delete\", and
    \"replace\"."""
    value: "_models.UpdateQnaSourceRecord" = rest_field(visibility=["read", "create", "update", "delete", "query"])
    """Update source record. Required."""

    # Keyword-only constructor: both fields are required.
    @overload
    def __init__(
        self,
        *,
        op: Union[str, "_models.UpdateOperationKind"],
        value: "_models.UpdateQnaSourceRecord",
    ) -> None: ...

    # Alternate constructor: initialize directly from a raw JSON mapping.
    @overload
    def __init__(self, mapping: Mapping[str, Any]) -> None:
        """
        :param mapping: raw JSON to initialize the model.
        :type mapping: Mapping[str, Any]
        """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_patch.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_patch.py new file mode 100644 index 000000000000..87676c65a8f0 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/py.typed b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/azure/ai/language/questionanswering/authoring/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/dev_requirements.txt b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/dev_requirements.txt new file mode 100644 index 000000000000..0e53b6a72db5 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/dev_requirements.txt @@ -0,0 +1,3 @@ +-e ../../../eng/tools/azure-sdk-tools +../../core/azure-core +aiohttp \ No newline at end of file diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/migration_guide_authoring.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/migration_guide_authoring.md new file mode 100644 index 000000000000..1ec454346653 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/migration_guide_authoring.md @@ -0,0 +1,201 @@ +# Migration Guide: From Combined Package to Authoring-only Package + +This guide helps you migrate from the previous combined `azure-ai-language-questionanswering` package to the new **authoring‑only** package: `azure-ai-language-questionanswering-authoring`. + +## 1. Why the Split + +The original package contained both: + +* Runtime querying (answering questions against a deployed project) +* Authoring management (create / update / import / export / deploy projects, knowledge sources, QnAs, synonyms) + +Most users need only one side. After the split: + +* `azure-ai-language-questionanswering-authoring` focuses solely on project (authoring) management. +* A dedicated runtime package (or continued combined distribution) may follow separately. + +## 2. 
Package and Client Renames + +| Previous (Combined) | New Authoring Package | +|---------------------|-----------------------| +| Install: `azure-ai-language-questionanswering` | Install: `azure-ai-language-questionanswering-authoring` | +| Client: `AuthoringClient` | Client: `QuestionAnsweringAuthoringClient` | +| Async Client: `AuthoringClient` (aio) | Async: `QuestionAnsweringAuthoringClient` (aio) | + +Import paths remain under `azure.ai.language.questionanswering.authoring`, but the distribution name and client class changed for clarity. + +## 3. Installation + +```bash +pip uninstall azure-ai-language-questionanswering -y # if previously installed +pip install --pre azure-ai-language-questionanswering-authoring +``` + +From source (repo root): +```bash +pip install -e ./sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring +``` + +## 4. Quick Code Changes + +Old: + +```python +from azure.ai.language.questionanswering.authoring import AuthoringClient +client = AuthoringClient(endpoint, AzureKeyCredential(key)) +``` + +New: + +```python +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient +client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) +``` + +Async: + +```python +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient +``` + + +## 5. Updated / Changed APIs + +Key changes from earlier previews or combined forms: + +| Area | Before | Now | +|------|--------|-----| +| Deployment poller result | Dict with metadata (assumed) | `None` (completion only) | +| Export poller result | URL payload (assumed) | `None` (no payload yet) | + +All other authoring method names (`create_project`, `begin_update_sources`, `begin_update_qnas`, `begin_deploy_project`, `list_projects`, `list_deployments`) remain. + +## 6. Long-Running Operations (LRO) Semantics + +Current preview LRO pollers return `None` from `.result()`. 
Treat `.result()` as a completion signal only. Affected operations:
+
+* `begin_update_sources`
+* `begin_update_qnas`
+* `begin_export`
+* `begin_import_assets`
+* `begin_deploy_project`
+
+Future versions may introduce typed results; avoid relying on payload shape.
+
+### Example (Import)
+
+```python
+from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient, models as qa_models
+from azure.core.credentials import AzureKeyCredential
+
+client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key))
+assets = qa_models.ImportJobOptions(
+    assets=qa_models.Assets(
+        qnas=[
+            qa_models.ImportQnaRecord(
+                id=1,
+                answer="Example",
+                source="https://contoso/faq",
+                questions=["Example?"],
+            )
+        ]
+    )
+)
+poller = client.begin_import_assets(project_name="MyProject", body=assets, file_format="json")
+poller.result()  # None (just completion)
+```
+
+
+## 7. Authentication Guidance
+
+Preferred: Azure Active Directory with `DefaultAzureCredential`.
+
+```python
+from azure.identity import DefaultAzureCredential
+from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient
+
+endpoint = "https://<your-resource-name>.cognitiveservices.azure.com"
+credential = DefaultAzureCredential()
+client = QuestionAnsweringAuthoringClient(endpoint, credential)
+```
+
+Fallback (resource key):
+
+```python
+from azure.core.credentials import AzureKeyCredential
+client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key))
+```
+
+Ensure you use the custom subdomain endpoint for AAD (not a regional alias) so the challenge flow succeeds.
+
+## 8. Python & Dependencies
+
+* Python >= 3.9
+* Core dependencies: `azure-core`, `isodate`, (conditionally) `typing-extensions` (<3.11)
+
+Upgrade from Python 3.8 before installing.
+
+## 9. 
Environment Variables (Optional Convenience)
+
+| Variable | Purpose |
+|----------|---------|
+| `AZURE_QUESTIONANSWERING_ENDPOINT` | Resource endpoint |
+| `AZURE_QUESTIONANSWERING_KEY` | Key credential (if not using AAD) |
+| `AZURE_QUESTIONANSWERING_PROJECT` | Optional: default project name for samples |
+
+
+## 10. Sample & Test Layout
+
+* `samples/` and `async_samples/` now contain **authoring-only** scenarios.
+* Runtime question querying samples were removed from this package.
+* Live test fixtures trimmed to authoring necessities only.
+
+
+## 11. Backward / Rollback Strategy
+
+If you still need runtime querying before a dedicated runtime package is published, you can temporarily continue using the combined package version you had. Plan to migrate fully once the runtime split is available.
+
+
+## 12. Synonyms, Sources, and QnAs Model-Based Updates
+
+Use model classes instead of raw dictionaries for clarity and forward compatibility:
+
+```python
+from azure.ai.language.questionanswering.authoring import models as qa_models
+client.begin_update_sources(
+    project_name="MyProject",
+    sources=[
+        qa_models.UpdateSourceRecord(
+            op="add",
+            value=qa_models.UpdateQnaSourceRecord(
+                display_name="FAQSrc",
+                source="https://contoso.com/faq",
+                source_uri="https://contoso.com/faq",
+                source_kind="url",
+                content_structure_kind="unstructured",
+                refresh=False,
+            ),
+        )
+    ],
+).result()
+```
+
+
+## 13. Known Preview Limitations
+
+| Limitation | Detail |
+|-----------|--------|
+| LRO result payloads | All `begin_*` currently return `None` from `.result()` |
+| Export artifact access | No direct download URL surfaced yet |
+| Metadata evolution | Model names/fields may still change before GA |
+
+## 14. Filing Issues
+
+Provide:
+
+* Old & new package versions
+* Code snippet (minimal repro)
+* Full stack trace (if exception)
+
+---
+Additional runtime migration notes will be added once the runtime package is published. 
diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/pyproject.toml b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/pyproject.toml new file mode 100644 index 000000000000..7e3628f87782 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/pyproject.toml @@ -0,0 +1,62 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +[build-system] +requires = ["setuptools>=77.0.3", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-ai-language-questionanswering-authoring" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Corporation Azure Ai Language Questionanswering Authoring Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = ["azure", "azure sdk"] + +dependencies = [ + "isodate>=0.6.1", + "azure-core>=1.35.0", + "typing-extensions>=4.6.0", + "azure-ai-language-questionanswering>=2.0.0b1", +] +dynamic = [ +"version", "readme" +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic] +version = {attr = 
"azure.ai.language.questionanswering.authoring._version.VERSION"} +readme = {file = ["README.md", "CHANGELOG.md"], content-type = "text/markdown"} + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", + "azure.ai.language", + "azure.ai.language.questionanswering", +] + +[tool.setuptools.package-data] +pytyped = ["py.typed"] diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/README.md b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/README.md new file mode 100644 index 000000000000..246d5e25931d --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/README.md @@ -0,0 +1,127 @@ +--- +page_type: sample +languages: + - python +products: + - azure + - azure-ai-language-service +name: Azure AI Language Question Answering Authoring Python samples +description: Code samples that demonstrate how to author (create, update, deploy, export/import) Question Answering projects using the azure-ai-language-questionanswering-authoring package. +--- + +# Azure AI Language Question Answering Authoring Samples + +These samples show how to use the `azure-ai-language-questionanswering-authoring` client library to manage (author) Question Answering projects: create a project, update sources and QnA pairs, manage synonyms, deploy, and export/import assets. They complement runtime (query) samples found in the separate `azure-ai-language-questionanswering` package. + +> NOTE: This library version is in preview; APIs and models may change before GA. + +## Prerequisites + +1. An Azure subscription. +2. An Azure AI Language resource with Question Answering enabled (custom subdomain endpoint recommended). +3. Python 3.9+. +4. 
Install the package:
+   - Release: `pip install --pre azure-ai-language-questionanswering-authoring`
+   - Dev/editable from repo root: `pip install -e .` (run inside the package directory)
+   Optional for AAD auth: `pip install azure-identity`
+
+## Environment variables
+
+Set the following (key auth shown below). If using AAD, set the identity environment variables (`AZURE_CLIENT_ID`, `AZURE_TENANT_ID`, `AZURE_CLIENT_SECRET`) instead of the key.
+
+| Variable | Required | Purpose |
+|----------|----------|---------|
+| `AZURE_QUESTIONANSWERING_ENDPOINT` | Yes | Your resource endpoint (`https://<your-resource-name>.cognitiveservices.azure.com/`). |
+| `AZURE_QUESTIONANSWERING_KEY` | Yes (for key auth) | API key for the resource. |
+| `AZURE_QUESTIONANSWERING_PROJECT` | Optional* | Existing project name used by export/import samples; created if absent in some scenarios. |
+
+Example (PowerShell):
+```powershell
+$env:AZURE_QUESTIONANSWERING_ENDPOINT="https://<your-resource-name>.cognitiveservices.azure.com/"
+$env:AZURE_QUESTIONANSWERING_KEY="<your-api-key>"
+$env:AZURE_QUESTIONANSWERING_PROJECT="myProject"
+```
+
+## Sample list
+
+| File (sync) | Scenario | Key operations | Async equivalent |
+|-------------|----------|----------------|------------------|
+| `sample_create_and_deploy_project.py` | Create a project, add a knowledge source, deploy to `production` | `create_project`, `begin_update_sources`, `begin_deploy_project` | `async_samples/sample_create_and_deploy_project_async.py` |
+| `sample_export_import_project.py` | Export an existing project then import as a new one | `begin_export`, `begin_import_assets` | `async_samples/sample_export_import_project_async.py` |
+| `sample_update_knowledge_sources.py` | Batch modify sources, add QnA pairs, manage synonyms | `begin_update_sources`, `begin_update_qnas`, `update_synonyms` | `async_samples/sample_update_knowledge_sources_async.py` |
+
+All async samples mirror their sync counterparts and reside in `async_samples/`. 
+ +## Running samples + +Navigate to this `samples` directory (paths shown PowerShell; adjust for your shell): + +Run a single sync sample: +```powershell +python sample_create_and_deploy_project.py +``` + +Run corresponding async sample: +```powershell +python async_samples/sample_create_and_deploy_project_async.py +``` + +Optionally run all samples with tox (from package root): +```powershell +tox run -e samples -c ../../../eng/tox/tox.ini --root . +``` + +If you see authentication errors: +- Verify endpoint spelling (must be the custom subdomain). +- For key auth: confirm the key matches the resource. +- For AAD: ensure the service principal has the appropriate Cognitive Services role. + +## Snippets & documentation extraction + +To surface code blocks in reference documentation or README files, enclose regions with: +```python +# [START my_region] +... code ... +# [END my_region] +``` +Keywords must be unique across sync + async samples. These markers allow Sphinx `literalinclude` + snippet tooling to pull consistent, tested code. +See the repository guide for details: `doc/dev/code_snippets.md`. + +## Long-running operations + +Authoring operations that start with `begin_` return an `LROPoller`. In the current preview, `.result()` returns `None` (no payload). Treat completion as success; poller exceptions surface service errors. + +## Cleanup + +Delete temporary or experimental projects to keep your resource tidy. Use the `begin_delete_project` LRO: +```python +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient + +client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) +client.begin_delete_project(project_name="FAQ").result() # returns None on completion +``` +Remove exported JSON/TSV assets under `./ExportedProject` if no longer needed. 
+ +## Next steps + +- Explore authoring samples in more depth (open each file for inline comments) +- Try runtime Q&A with the `azure-ai-language-questionanswering` package +- Read the authoring REST documentation +- Review the main Authoring client README for conceptual details + +## Troubleshooting quick notes + +- 401 / 403: Check key validity or AAD role assignment +- 404 on project: Ensure you created it (see create/deploy sample) +- Empty export file: Make sure the project has at least one QnA/source + +## Contributing + +Issues and contributions are welcome. See the repository `CONTRIBUTING.md` for guidelines. + + +[rest_authoring]: https://learn.microsoft.com/rest/api/language/question-answering-projects +[code_snippets_guide]: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/code_snippets.md +[authoring_readme]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/README.md +[runtime_package]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_create_and_deploy_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_create_and_deploy_project_async.py new file mode 100644 index 000000000000..788f800fc8d0 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_create_and_deploy_project_async.py @@ -0,0 +1,64 @@ +"""Async sample - Create and deploy a Question Answering authoring project.""" + +import os +import asyncio +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient +from azure.ai.language.questionanswering.authoring import models as _models + + +async def 
sample_create_and_deploy_project_async(): + # [START create_and_deploy_project] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + async with client: + project_name = "IsaacNewton" + project = await client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + print(f"Created project {project['projectName']}") + + print("Listing projects and confirming creation...") + async for p in client.list_projects(): + if p["projectName"] == project_name: + print(f"Found project {p['projectName']}") + + update_sources_poller = await client.begin_update_sources( + project_name=project_name, + sources=[ + _models.UpdateSourceRecord( + op="add", + value=_models.UpdateQnaSourceRecord( + display_name="Isaac Newton Bio", + source="https://wikipedia.org/wiki/Isaac_Newton", + source_uri="https://wikipedia.org/wiki/Isaac_Newton", + source_kind="url", + content_structure_kind="unstructured", + refresh=False, + ), + ) + ], + ) + await update_sources_poller.result() + print("Knowledge sources updated (1 URL added)") + + deployment_poller = await client.begin_deploy_project(project_name=project_name, deployment_name="production") + await deployment_poller.result() # completes; no payload + print("Deployment created: production") + + print("Project deployments:") + async for d in client.list_deployments(project_name=project_name): + print(f" {d['deploymentName']}") + # [END create_and_deploy_project] + + +if __name__ == "__main__": + asyncio.run(sample_create_and_deploy_project_async()) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_export_import_project_async.py 
b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_export_import_project_async.py new file mode 100644 index 000000000000..a1f3258484af --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_export_import_project_async.py @@ -0,0 +1,42 @@ +"""Async sample - Export and import a Question Answering authoring project.""" + +import os +import asyncio +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient + + +async def sample_export_import_project_async(): + # [START export_import_project] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + project_name = os.environ["AZURE_QUESTIONANSWERING_PROJECT"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + async with client: + export_format = "json" + export_poller = await client.begin_export(project_name=project_name, file_format=export_format) + await export_poller.result() # completes; no payload + # No export URL available from the poller result in current API; skipping download section. 
+ minimal_assets = { + "assets": { + "qnas": [ + { + "id": 1, + "answer": "Placeholder answer for imported project.", + "source": "https://contoso.example/source", + "questions": ["Sample question?"], + } + ] + } + } + import_poller = await client.begin_import_assets( + project_name=project_name, body=minimal_assets, file_format="json" + ) + await import_poller.result() + print(f"Imported project as {project_name} (minimal assets)") + # [END export_import_project] + + +if __name__ == "__main__": + asyncio.run(sample_export_import_project_async()) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_update_knowledge_sources_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_update_knowledge_sources_async.py new file mode 100644 index 000000000000..5fbc34812413 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/async_samples/sample_update_knowledge_sources_async.py @@ -0,0 +1,82 @@ +"""Async sample - Update project knowledge sources and add QnAs & synonyms.""" + +import os +import asyncio +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient +from azure.ai.language.questionanswering.authoring import models as _models + + +async def sample_update_knowledge_sources_async(): + # [START update_knowledge_sources] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + async with client: + project_name = "MicrosoftFAQProject" + await client.create_project( + project_name=project_name, + options={ + "description": "Test project for some Microsoft QnAs", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + + sources_poller = await 
client.begin_update_sources( + project_name=project_name, + sources=[ + _models.UpdateSourceRecord( + op="add", + value=_models.UpdateQnaSourceRecord( + display_name="MicrosoftFAQ", + source="https://www.microsoft.com/en-in/software-download/faq", + source_uri="https://www.microsoft.com/en-in/software-download/faq", + source_kind="url", + content_structure_kind="unstructured", + refresh=False, + ), + ) + ], + ) + await sources_poller.result() + print("Knowledge source added (MicrosoftFAQ)") + + qna_poller = await client.begin_update_qnas( + project_name=project_name, + qnas=[ + _models.UpdateQnaRecord( + op="add", + value=_models.QnaRecord( + id=1, + questions=["What is the easiest way to use azure services in my .NET project?"], + answer="Using Microsoft's Azure SDKs", + source="manual", + ), + ) + ], + ) + await qna_poller.result() + print("QnA added (1 record)") + + await client.update_synonyms( + project_name=project_name, + synonyms=_models.SynonymAssets( + value=[ + _models.WordAlterations(alterations=["qnamaker", "qna maker"]), + _models.WordAlterations(alterations=["qna", "question and answer"]), + ] + ), + ) + synonyms = client.list_synonyms(project_name=project_name) + async for item in synonyms: + print("Synonyms group:") + for alt in item["alterations"]: + print(f" {alt}") + # [END update_knowledge_sources] + + +if __name__ == "__main__": + asyncio.run(sample_update_knowledge_sources_async()) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_create_and_deploy_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_create_and_deploy_project.py new file mode 100644 index 000000000000..de1a97d75277 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_create_and_deploy_project.py @@ -0,0 +1,79 @@ +"""Sample - Create and deploy a Question Answering authoring project. 
+ +This sample demonstrates how to: + * Create an authoring project + * Add a knowledge source + * Deploy the project + * List deployments + +Environment variables required: + * AZURE_QUESTIONANSWERING_ENDPOINT - endpoint of your Language resource + * AZURE_QUESTIONANSWERING_KEY - API key + +Run with: python sample_create_and_deploy_project.py +""" + +import os +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient, models as _models + + +def sample_create_and_deploy_project(): + # [START create_and_deploy_project] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + with client: + project_name = "IsaacNewton" + project = client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + + print("Created project:") + print(f" name: {project['projectName']}") + print(f" language: {project['language']}") + print(f" description: {project['description']}") + + print("Listing projects and confirming creation...") + for p in client.list_projects(): + if p["projectName"] == project_name: + print(f"Found project {p['projectName']}") + + update_sources_poller = client.begin_update_sources( + project_name=project_name, + sources=[ + _models.UpdateSourceRecord( + op="add", + value=_models.UpdateQnaSourceRecord( + display_name="Isaac Newton Bio", + source="https://wikipedia.org/wiki/Isaac_Newton", # source id + source_uri="https://wikipedia.org/wiki/Isaac_Newton", + source_kind="url", + content_structure_kind="unstructured", + refresh=False, + ), + ) + ], + ) + update_sources_poller.result() # completes; no return payload + print("Knowledge sources updated (1 URL added)") + + deployment_poller = 
client.begin_deploy_project(project_name=project_name, deployment_name="production") + deployment_poller.result() # LRO completes; no deployment payload returned in current SDK + print("Deployment created: production") + + print("Project deployments:") + for d in client.list_deployments(project_name=project_name): + print(f" {d['deploymentName']} (lastDeployed: {d.get('lastDeployedDateTime', 'N/A')})") + # [END create_and_deploy_project] + + +if __name__ == "__main__": + sample_create_and_deploy_project() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_export_import_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_export_import_project.py new file mode 100644 index 000000000000..da92154cc985 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_export_import_project.py @@ -0,0 +1,56 @@ +"""Sample - Export and import a Question Answering authoring project. + +Shows how to: + * Export an existing project to JSON + * Re-import the assets as a new project + +Environment variables required: + * AZURE_QUESTIONANSWERING_ENDPOINT + * AZURE_QUESTIONANSWERING_KEY + * AZURE_QUESTIONANSWERING_PROJECT - existing project to export + +Run with: python sample_export_import_project.py +""" + +import os +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient + + +def sample_export_import_project(): + # [START export_import_project] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + project_name = os.environ["AZURE_QUESTIONANSWERING_PROJECT"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + with client: + export_format = "json" + # Updated: parameter is now 'file_format', and LRO result is None (no metadata dict). 
+ export_poller = client.begin_export(project_name=project_name, file_format=export_format) + export_poller.result() # completes; no result payload + # In the new API surface an export URL isn't returned via poller.result(); a separate + # retrieval step would be needed if/when service exposes it. This sample now focuses on + # demonstrating the LRO pattern only. + # For illustration, we skip downloading assets (no resultUrl available in current LRO shape). + # Import demonstration: provide minimal valid assets payload manually. + minimal_assets = { + "assets": { + "qnas": [ + { + "id": 1, + "answer": "Placeholder answer for imported project.", + "source": "https://contoso.example/source", + "questions": ["Sample question?"], + } + ] + } + } + import_poller = client.begin_import_assets(project_name=project_name, body=minimal_assets, file_format="json") + import_poller.result() # completes; no result payload + print(f"Imported project as {project_name} (minimal assets)") + # [END export_import_project] + + +if __name__ == "__main__": + sample_export_import_project() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_update_knowledge_sources.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_update_knowledge_sources.py new file mode 100644 index 000000000000..cdeec7abca36 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/samples/sample_update_knowledge_sources.py @@ -0,0 +1,93 @@ +"""Sample - Update project knowledge sources and add QnAs & synonyms. 
+ +Demonstrates: + * Creating a project + * Adding a URL knowledge source + * Adding QnA pairs + * Adding synonyms + +Environment variables required: + * AZURE_QUESTIONANSWERING_ENDPOINT + * AZURE_QUESTIONANSWERING_KEY + +Run with: python sample_update_knowledge_sources.py +""" + +import os +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient, models as _models + + +def sample_update_knowledge_sources(): + # [START update_knowledge_sources] + endpoint = os.environ["AZURE_QUESTIONANSWERING_ENDPOINT"] + key = os.environ["AZURE_QUESTIONANSWERING_KEY"] + + client = QuestionAnsweringAuthoringClient(endpoint, AzureKeyCredential(key)) + with client: + project_name = "MicrosoftFAQProject" + client.create_project( + project_name=project_name, + options={ + "description": "Test project for some Microsoft QnAs", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + + sources_poller = client.begin_update_sources( + project_name=project_name, + sources=[ + _models.UpdateSourceRecord( + op="add", + value=_models.UpdateQnaSourceRecord( + display_name="MicrosoftFAQ", + source="https://www.microsoft.com/en-in/software-download/faq", + source_uri="https://www.microsoft.com/en-in/software-download/faq", + source_kind="url", + content_structure_kind="unstructured", + refresh=False, + ), + ) + ], + ) + sources_poller.result() + print("Knowledge source added (MicrosoftFAQ)") + + qna_poller = client.begin_update_qnas( + project_name=project_name, + qnas=[ + _models.UpdateQnaRecord( + op="add", + value=_models.QnaRecord( + id=1, + questions=["What is the easiest way to use azure services in my .NET project?"], + answer="Using Microsoft's Azure SDKs", + source="manual", + ), + ) + ], + ) + qna_poller.result() + print("QnA added (1 record)") + + client.update_synonyms( + project_name=project_name, + synonyms=_models.SynonymAssets( + value=[ + 
_models.WordAlterations(alterations=["qnamaker", "qna maker"]), + _models.WordAlterations(alterations=["qna", "question and answer"]), + ] + ), + ) + synonyms = client.list_synonyms(project_name=project_name) + for item in synonyms: # ItemPaged + print("Synonyms group:") + for alt in item["alterations"]: + print(f" {alt}") + # [END update_knowledge_sources] + + +if __name__ == "__main__": + sample_update_knowledge_sources() diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/sdk_packaging.toml b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/sdk_packaging.toml new file mode 100644 index 000000000000..8d938a9ebff5 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/sdk_packaging.toml @@ -0,0 +1,41 @@ +[packaging] +auto_update = false + +[package] +name = "azure-ai-language-questionanswering-authoring" +display_name = "Azure AI Language Question Answering Authoring" +description = "Authoring client for Azure AI Language Question Answering (preview)." 
+keywords = ["azure", "cognitive services", "language", "question answering", "authoring"] +license = "MIT" +repository_url = "https://github.com/Azure/azure-sdk-for-python" +# First preview version; adjust if coordinating with broader release plan +version = "1.0.0b1" +# Minimum Python version (align with repo policy; this package requires >=3.9) +python_min = "3.9" + +[package.readme] +path = "README.md" + +[package.changelog] +path = "CHANGELOG.md" + +[package.namespace] +# Root import path +name = "azure.ai.language.questionanswering.authoring" + +[dependencies] +azure-core = ">=1.28.0" # aligned with setup.py requirement +isodate = ">=0.6.1" +typing-extensions = ">=4.0.1; python_version<'3.11'" + +[dev-dependencies] +azure-identity = ">=1.15.0" +pytest = "*" + +[build] +# Standard artifacts +artifacts = ["sdist", "wheel"] + +[metadata] +# Extra metadata fields consumed by doc generation or release tooling +service_name = "cognitivelanguage" diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/conftest.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/conftest.py new file mode 100644 index 000000000000..5c6c45fb15a3 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/conftest.py @@ -0,0 +1,35 @@ +import pytest +from devtools_testutils.sanitizers import ( + add_header_regex_sanitizer, + add_oauth_response_sanitizer, +) + +ENV_ENDPOINT = "AZURE_QUESTIONANSWERING_ENDPOINT" +ENV_KEY = "AZURE_QUESTIONANSWERING_KEY" +ENV_PROJECT = "AZURE_QUESTIONANSWERING_PROJECT" + +TEST_ENDPOINT = "https://test-resource.cognitiveservices.azure.com/" +TEST_KEY = "0000000000000000" +TEST_PROJECT = "test-project" + + +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy, environment_variables): # type: ignore[name-defined] + environment_variables.sanitize_batch( + { + ENV_ENDPOINT: TEST_ENDPOINT, + ENV_KEY: TEST_KEY, + ENV_PROJECT:
TEST_PROJECT, + } + ) + add_oauth_response_sanitizer() + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + + +@pytest.fixture(scope="session") +def qna_authoring_creds(environment_variables): # type: ignore[name-defined] + yield { + "endpoint": environment_variables.get(ENV_ENDPOINT), + "key": environment_variables.get(ENV_KEY), + "project": environment_variables.get(ENV_PROJECT), + } diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/helpers.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/helpers.py new file mode 100644 index 000000000000..9806e69dad4f --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/helpers.py @@ -0,0 +1,120 @@ +class AuthoringTestHelper: + """Utility helper for creating and exporting authoring test projects.""" + + @staticmethod + def create_test_project( + client, + project_name="IsaacNewton", + is_deployable=False, + add_sources=False, + get_export_url=False, + delete_old_project=False, + **kwargs, + ): + # Separate polling keyword (if present) from feature flags to avoid accidental binding confusions + polling_interval = kwargs.pop("polling_interval", None) + client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + source_kwargs = {"polling_interval": polling_interval} if polling_interval is not None else {} + if is_deployable or add_sources: + AuthoringTestHelper.add_sources(client, project_name, **source_kwargs) + if get_export_url: + return AuthoringTestHelper.export_project( + client, project_name, delete_project=delete_old_project, **source_kwargs + ) + + @staticmethod + def add_sources(client, project_name, **kwargs): + poller = client.begin_update_sources( + project_name=project_name, + sources=[ + { + "op": "add", + "value": { + 
"displayName": "Isaac Newton Bio", + "sourceUri": "https://wikipedia.org/wiki/Isaac_Newton", + "sourceKind": "url", + }, + } + ], + **kwargs, + ) + poller.result() + + @staticmethod + def export_project(client, project_name, delete_project=True, **kwargs): + # begin_export poller is typed as LROPoller[None]; generator currently discards + # the final body so result() returns None. We only validate successful completion. + export_poller = client.begin_export(project_name=project_name, file_format="json", **kwargs) + export_poller.result() # ensure completion (raises on failure) + if delete_project: + delete_poller = client.begin_delete_project(project_name=project_name, **kwargs) + delete_poller.result() + # No export URL available due to None payload; caller should not depend on return value. + return None + + +class AuthoringAsyncTestHelper: + """Async utility helper for creating and exporting authoring test projects.""" + + @staticmethod + async def create_test_project( + client, + project_name="IsaacNewton", + is_deployable=False, + add_sources=False, + get_export_url=False, + delete_old_project=False, + **kwargs, + ): + polling_interval = kwargs.pop("polling_interval", None) + await client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + source_kwargs = {"polling_interval": polling_interval} if polling_interval is not None else {} + if is_deployable or add_sources: + await AuthoringAsyncTestHelper.add_sources(client, project_name, **source_kwargs) + if get_export_url: + return await AuthoringAsyncTestHelper.export_project( + client, project_name, delete_project=delete_old_project, **source_kwargs + ) + + @staticmethod + async def add_sources(client, project_name, **kwargs): + poller = await client.begin_update_sources( + project_name=project_name, + sources=[ + { + "op": "add", + "value": { + 
"displayName": "Isaac Newton Bio", + "sourceUri": "https://wikipedia.org/wiki/Isaac_Newton", + "sourceKind": "url", + }, + } + ], + **kwargs, + ) + await poller.result() + + @staticmethod + async def export_project(client, project_name, delete_project=True, **kwargs): + export_poller = await client.begin_export(project_name=project_name, file_format="json", **kwargs) + await export_poller.result() + if delete_project: + delete_poller = await client.begin_delete_project(project_name=project_name, **kwargs) + await delete_poller.result() + return None diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project.py new file mode 100644 index 000000000000..6a22ab86837c --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project.py @@ -0,0 +1,58 @@ +# pylint: disable=line-too-long,useless-suppression +import pytest +from typing import Any, Dict, cast +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient + +from helpers import AuthoringTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestCreateAndDeploy(QuestionAnsweringAuthoringTestCase): + def test_polling_interval(self, qna_authoring_creds): + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + # Default polling interval may change across previews; assert it is a positive int (previously 30) instead of a fixed legacy value + assert isinstance(client._config.polling_interval, int) and client._config.polling_interval > 0 + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]), polling_interval=1 + ) + assert 
client._config.polling_interval == 1 + + def test_create_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + found = any(p.get("projectName") == project_name for p in client.list_projects()) + assert found + + def test_deploy_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + AuthoringTestHelper.create_test_project( + client, + project_name=project_name, + is_deployable=True, + polling_interval=0 if self.is_playback else None, + ) + deployment_poller = client.begin_deploy_project( + project_name=project_name, + deployment_name="production", + polling_interval=0 if self.is_playback else None, + ) + # Preview LRO returns None; just ensure it completes without error + deployment_poller.result() + assert any(d.get("deploymentName") == "production" for d in client.list_deployments(project_name=project_name)) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project_async.py new file mode 100644 index 000000000000..4385d4d3c4e9 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_create_and_deploy_project_async.py @@ -0,0 +1,58 @@ +import pytest +from typing import Any, Dict, cast +from azure.core.credentials import AzureKeyCredential +from 
azure.ai.language.questionanswering.authoring.aio import QuestionAnsweringAuthoringClient + +from helpers import AuthoringAsyncTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestCreateAndDeployAsync(QuestionAnsweringAuthoringTestCase): + @pytest.mark.asyncio + async def test_create_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + async with client: + await client.create_project( + project_name=project_name, + options={ + "description": "Biography of Sir Isaac Newton", + "language": "en", + "multilingualResource": True, + "settings": {"defaultAnswer": "no answer"}, + }, + ) + found = False + async for p in client.list_projects(): + if p.get("projectName") == project_name: + found = True + assert found + + @pytest.mark.asyncio + async def test_deploy_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + async with client: + await AuthoringAsyncTestHelper.create_test_project( + client, + project_name=project_name, + is_deployable=True, + polling_interval=0 if self.is_playback else None, + ) + deployment_poller = await client.begin_deploy_project( + project_name=project_name, + deployment_name="production", + polling_interval=0 if self.is_playback else None, + ) + # Preview LRO returns None; just await completion + await deployment_poller.result() + deployments = client.list_deployments(project_name=project_name) + found = False + async for d in deployments: + if d.get("deploymentName") == "production": + found = True + assert found diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project.py 
b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project.py new file mode 100644 index 000000000000..69d2912feed5 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project.py @@ -0,0 +1,70 @@ +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient, models as _models + +from helpers import AuthoringTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestExportAndImport(QuestionAnsweringAuthoringTestCase): + def test_export_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + AuthoringTestHelper.create_test_project( + client, project_name=project_name, polling_interval=0 if self.is_playback else None + ) + export_poller = client.begin_export( + project_name=project_name, + file_format="json", + polling_interval=0 if self.is_playback else None, + ) + export_poller.result() # LROPoller[None]; ensure no exception + assert export_poller.done() + + def test_import_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + # Create project without deleting it; we just need it present for import. 
+ AuthoringTestHelper.create_test_project( + client, + project_name=project_name, + get_export_url=False, + delete_old_project=False, + polling_interval=0 if self.is_playback else None, + ) + # Wait briefly until project is visible (eventual consistency safeguard) + visible = any(p.get("projectName") == project_name for p in client.list_projects()) + if not visible: + import time + + for _ in range(5): + time.sleep(1) + if any(p.get("projectName") == project_name for p in client.list_projects()): + visible = True + break + assert visible, "Project not visible for import" + # Provide a minimal valid ImportJobOptions with one QnA to avoid empty list validation failure. + project_payload = _models.ImportJobOptions( + assets=_models.Assets( + qnas=[ + _models.ImportQnaRecord( + id=1, + answer="Gravity is a force of attraction.", + source="https://wikipedia.org/wiki/Isaac_Newton", + questions=["What is gravity?"], + ) + ] + ) + ) + import_poller = client.begin_import_assets( + project_name=project_name, + body=project_payload, + file_format="json", + polling_interval=0 if self.is_playback else None, + ) + import_poller.result() # LROPoller[None]; ensure completion + assert import_poller.done() + assert any(p.get("projectName") == project_name for p in client.list_projects()) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project_async.py new file mode 100644 index 000000000000..eb6836402720 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_export_import_project_async.py @@ -0,0 +1,81 @@ +import pytest +import asyncio +from typing import Any, Dict, cast +from azure.ai.language.questionanswering.authoring import models as _models +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring.aio import 
QuestionAnsweringAuthoringClient + +from helpers import AuthoringAsyncTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestExportAndImportAsync(QuestionAnsweringAuthoringTestCase): + @pytest.mark.asyncio + async def test_export_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + polling_interval = self.kwargs_for_polling.get("polling_interval") + async with client: + await AuthoringAsyncTestHelper.create_test_project( + client, project_name=project_name, polling_interval=polling_interval + ) + poller = await client.begin_export( + project_name=project_name, file_format="json", polling_interval=polling_interval + ) + await poller.result() # LROPoller[None]; ensure completion + assert poller.done() + + @pytest.mark.asyncio + async def test_import_project(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + polling_interval = self.kwargs_for_polling.get("polling_interval") + async with client: + # For import, ensure project exists; do NOT delete it beforehand to avoid 404. + await AuthoringAsyncTestHelper.create_test_project( + client, + project_name=project_name, + get_export_url=False, + delete_old_project=False, + polling_interval=polling_interval, + ) + # Wait for project to be observable (eventual consistency) before import. 
+ project_visible = False + for _ in range(5): + async for proj in client.list_projects(): + if proj.get("projectName") == project_name: + project_visible = True + break + if project_visible: + break + await asyncio.sleep(1) + assert project_visible, "Project not visible after creation" + + # Construct minimal valid ImportJobOptions (metadata fields are read-only server side; + # import focuses on assets. Provide one QnA record so the assets payload is non-empty.) + import_body = _models.ImportJobOptions( + assets=_models.Assets( + qnas=[ + _models.ImportQnaRecord( + id=1, + answer="Gravity is a force of attraction.", + source="https://wikipedia.org/wiki/Isaac_Newton", + questions=["What is gravity?"], + ) + ] + ) + ) + poller = await client.begin_import_assets( + project_name=project_name, body=import_body, file_format="json", polling_interval=polling_interval + ) + await poller.result() # LROPoller[None]; ensure completion + assert poller.done() + found = False + async for p in client.list_projects(): + if p.get("projectName") == project_name: + found = True + assert found diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources.py new file mode 100644 index 000000000000..9c8e324e8288 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources.py @@ -0,0 +1,93 @@ +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring import QuestionAnsweringAuthoringClient +from azure.ai.language.questionanswering.authoring import models as _models +from typing import cast + +from helpers import AuthoringTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestSourcesQnasSynonyms(QuestionAnsweringAuthoringTestCase): + def test_add_source(self, recorded_test, qna_authoring_creds): #
type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + AuthoringTestHelper.create_test_project(client, project_name=project_name) + source_display_name = "MicrosoftFAQ" + update_source_ops = [ + _models.UpdateSourceRecord( + { + "op": "add", + "value": { + "displayName": source_display_name, + "source": "https://www.microsoft.com/en-in/software-download/faq", + "sourceUri": "https://www.microsoft.com/en-in/software-download/faq", + "sourceKind": "url", + "contentStructureKind": "unstructured", + "refresh": False, + }, + } + ) + ] + poller = client.begin_update_sources( + project_name=project_name, + sources=cast(list[_models.UpdateSourceRecord], update_source_ops), + content_type="application/json", + polling_interval=0 if self.is_playback else None, # type: ignore[arg-type] + ) + poller.result() + assert any(s.get("displayName") == source_display_name for s in client.list_sources(project_name=project_name)) + + def test_add_qna(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + AuthoringTestHelper.create_test_project(client, project_name=project_name) + question = "What is the easiest way to use azure services in my .NET project?" 
+ answer = "Using Microsoft's Azure SDKs" + update_qna_ops = [ + _models.UpdateQnaRecord( + { + "op": "add", + "value": { + "id": 0, # required by model schema; service will assign real id + "answer": answer, + "questions": [question], + }, + } + ) + ] + poller = client.begin_update_qnas( + project_name=project_name, + qnas=cast(list[_models.UpdateQnaRecord], update_qna_ops), + content_type="application/json", + polling_interval=0 if self.is_playback else None, # type: ignore[arg-type] + ) + poller.result() + assert any( + (q.get("answer") == answer and question in q.get("questions", [])) + for q in client.list_qnas(project_name=project_name) + ) + + def test_add_synonym(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + AuthoringTestHelper.create_test_project(client, project_name=project_name) + synonyms_model = _models.SynonymAssets( + value=[ + _models.WordAlterations(alterations=["qnamaker", "qna maker"]), + ] + ) + client.update_synonyms( + project_name=project_name, + synonyms=synonyms_model, + content_type="application/json", + ) + assert any( + ("qnamaker" in s.get("alterations", []) and "qna maker" in s.get("alterations", [])) + for s in client.list_synonyms(project_name=project_name) + ) diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources_async.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources_async.py new file mode 100644 index 000000000000..a565ef5ffcb7 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/test_update_knowledge_sources_async.py @@ -0,0 +1,111 @@ +import pytest +from typing import cast +from azure.core.credentials import AzureKeyCredential +from azure.ai.language.questionanswering.authoring.aio 
import QuestionAnsweringAuthoringClient +from azure.ai.language.questionanswering.authoring import models as _models + +from helpers import AuthoringAsyncTestHelper +from testcase import QuestionAnsweringAuthoringTestCase + + +class TestSourcesQnasSynonymsAsync(QuestionAnsweringAuthoringTestCase): + @pytest.mark.asyncio + async def test_add_source(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + async with client: + await AuthoringAsyncTestHelper.create_test_project( + client, project_name=project_name, polling_interval=0 if self.is_playback else None + ) + update_source_ops = [ + _models.UpdateSourceRecord( + { + "op": "add", + "value": { + "displayName": "MicrosoftFAQ", + "source": "https://www.microsoft.com/en-in/software-download/faq", + "sourceUri": "https://www.microsoft.com/en-in/software-download/faq", + "sourceKind": "url", + "contentStructureKind": "unstructured", + "refresh": False, + }, + } + ) + ] + poller = await client.begin_update_sources( + project_name=project_name, + sources=cast(list[_models.UpdateSourceRecord], update_source_ops), + content_type="application/json", + polling_interval=0 if self.is_playback else None, # type: ignore[arg-type] + ) + await poller.result() + found = False + async for s in client.list_sources(project_name=project_name): + if s.get("displayName") == "MicrosoftFAQ": + found = True + assert found + + @pytest.mark.asyncio + async def test_add_qna(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + async with client: + await AuthoringAsyncTestHelper.create_test_project( + client, project_name=project_name, polling_interval=0 if self.is_playback else None + ) 
+ question = "What is the easiest way to use azure services in my .NET project?" + answer = "Using Microsoft's Azure SDKs" + update_qna_ops = [ + _models.UpdateQnaRecord( + { + "op": "add", + "value": { + "id": 0, + "answer": answer, + "questions": [question], + }, + } + ) + ] + poller = await client.begin_update_qnas( + project_name=project_name, + qnas=cast(list[_models.UpdateQnaRecord], update_qna_ops), + content_type="application/json", + polling_interval=0 if self.is_playback else None, # type: ignore[arg-type] + ) + await poller.result() + found = False + async for qna in client.list_qnas(project_name=project_name): + if qna.get("answer") == answer and question in qna.get("questions", []): + found = True + assert found + + @pytest.mark.asyncio + async def test_add_synonym(self, recorded_test, qna_authoring_creds): # type: ignore[name-defined] + client = QuestionAnsweringAuthoringClient( + qna_authoring_creds["endpoint"], AzureKeyCredential(qna_authoring_creds["key"]) + ) + project_name = "IsaacNewton" + async with client: + await AuthoringAsyncTestHelper.create_test_project( + client, project_name=project_name, polling_interval=0 if self.is_playback else None + ) + synonyms_model = _models.SynonymAssets( + value=[ + _models.WordAlterations(alterations=["qnamaker", "qna maker"]), + ] + ) + await client.update_synonyms( + project_name=project_name, + synonyms=synonyms_model, + content_type="application/json", + ) + found = False + async for s in client.list_synonyms(project_name=project_name): + if "qnamaker" in s.get("alterations", []) and "qna maker" in s.get("alterations", []): + found = True + assert found diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/testcase.py b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/testcase.py new file mode 100644 index 000000000000..a8fb6a4a8d00 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tests/testcase.py @@ -0,0 
+1,9 @@ +from devtools_testutils import AzureRecordedTestCase + + +class QuestionAnsweringAuthoringTestCase(AzureRecordedTestCase): + @property + def kwargs_for_polling(self): + if self.is_playback: + return {"polling_interval": 0} + return {} diff --git a/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tsp-location.yaml b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tsp-location.yaml new file mode 100644 index 000000000000..d3b6720488c5 --- /dev/null +++ b/sdk/cognitivelanguage/azure-ai-language-questionanswering-authoring/tsp-location.yaml @@ -0,0 +1,5 @@ +directory: specification/cognitiveservices/data-plane/LanguageQuestionAnsweringAuthoring +commit: +repo: Azure/azure-rest-api-specs +additionalDirectories: [] + diff --git a/shared_requirements.txt b/shared_requirements.txt index 71f32df66427..b5e1b85f184a 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -95,4 +95,5 @@ langchain-azure-ai opentelemetry-exporter-otlp-proto-grpc agent-framework-core langchain -langchain-openai \ No newline at end of file +langchain-openai +azure-ai-language-questionanswering-authoring \ No newline at end of file