@@ -1865,7 +1865,7 @@ def __init__(self, **kwargs: Any) -> None:


class SearchScoreThreshold(VectorThreshold):
"""The results of the vector query will filter based on the '@search.score' value. Note this is
"""The results of the vector query will filter based on the '\\@search.score' value. Note this is
the @search.score returned as part of the search response. The threshold direction will be
chosen for higher @search.score.

@@ -1874,7 +1874,7 @@ class SearchScoreThreshold(VectorThreshold):
:ivar kind: The kind of threshold used to filter vector queries. Required. Known values are:
"vectorSimilarity" and "searchScore".
:vartype kind: str or ~azure.search.documents.models.VectorThresholdKind
- :ivar value: The threshold will filter based on the '@search.score' value. Note this is the
+ :ivar value: The threshold will filter based on the '\\@search.score' value. Note this is the
@search.score returned as part of the search response. The threshold direction will be chosen
for higher @search.score. Required.
:vartype value: float
@@ -1892,7 +1892,7 @@ class SearchScoreThreshold(VectorThreshold):

def __init__(self, *, value: float, **kwargs: Any) -> None:
"""
- :keyword value: The threshold will filter based on the '@search.score' value. Note this is the
+ :keyword value: The threshold will filter based on the '\\@search.score' value. Note this is the
@search.score returned as part of the search response. The threshold direction will be chosen
for higher @search.score. Required.
:paramtype value: float
@@ -419,6 +419,6 @@ class VectorThresholdKind(str, Enum, metaclass=CaseInsensitiveEnumMeta):
threshold direction (larger or smaller) will be chosen automatically according to the metric
used by the field."""
SEARCH_SCORE = "searchScore"
"""The results of the vector query will filter based on the '@search.score' value. Note this is
"""The results of the vector query will filter based on the '\\@search.score' value. Note this is
the @search.score returned as part of the search response. The threshold direction will be
chosen for higher @search.score."""
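
Note on the docstring changes above: escaping the at-sign ('\\@search.score') is presumably there so that documentation tooling renders it literally instead of treating it as a directive; the runtime behaviour of SearchScoreThreshold is unchanged. For orientation, here is a minimal, hypothetical sketch of applying the threshold to a vector query. It assumes an SDK build that exposes the threshold keyword on VectorizedQuery (the same release that ships VectorThreshold), plus placeholder service details and a made-up vector field name.

from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient
from azure.search.documents.models import SearchScoreThreshold, VectorizedQuery

# Placeholder endpoint, key, and index name -- substitute real values.
client = SearchClient(
    "https://<service>.search.windows.net", "<index>", AzureKeyCredential("<key>")
)

vector_query = VectorizedQuery(
    vector=[0.1, 0.2, 0.3],                     # query embedding (placeholder values)
    fields="contentVector",                     # assumed vector field in the index
    threshold=SearchScoreThreshold(value=0.8),  # drop results whose @search.score is below 0.8
)

for doc in client.search(search_text=None, vector_queries=[vector_query]):
    print(doc["@search.score"])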
@@ -690,7 +690,7 @@ def index_documents(self, batch: IndexDocumentsBatch, **kwargs: Any) -> List[Ind
:return: List of IndexingResult
:rtype: list[IndexingResult]

- :raises ~azure.search.documents.RequestEntityTooLargeError
+ :raises ~azure.search.documents.RequestEntityTooLargeError: The request is too large.
"""
return self._index_documents_actions(actions=batch.actions, **kwargs)

@@ -271,7 +271,7 @@ def index_documents(self, batch: IndexDocumentsBatch, **kwargs) -> List[Indexing
:return: Indexing result of each action in the batch.
:rtype: list[IndexingResult]

- :raises ~azure.search.documents.RequestEntityTooLargeError
+ :raises ~azure.search.documents.RequestEntityTooLargeError: The request is too large.
"""
return self._index_documents_actions(actions=batch.actions, **kwargs)

@@ -687,7 +687,7 @@ async def index_documents(self, batch: IndexDocumentsBatch, **kwargs: Any) -> Li
:return: List of IndexingResult
:rtype: list[IndexingResult]

- :raises ~azure.search.documents.RequestEntityTooLargeError
+ :raises ~azure.search.documents.RequestEntityTooLargeError: The request is too large.
"""
return await self._index_documents_actions(actions=batch.actions, **kwargs)

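
The three hunks above only complete the ':raises' entries so the exception description renders; index_documents itself is unchanged. As a usage note, a minimal sketch of handling that documented failure, assuming placeholder service details and a made-up index schema with id and content fields:

from azure.core.credentials import AzureKeyCredential
from azure.search.documents import IndexDocumentsBatch, RequestEntityTooLargeError, SearchClient

# Placeholder endpoint, key, and index name -- substitute real values.
client = SearchClient(
    "https://<service>.search.windows.net", "<index>", AzureKeyCredential("<key>")
)

batch = IndexDocumentsBatch()
batch.add_upload_actions([{"id": "1", "content": "hello"}])  # assumed id/content fields

try:
    results = client.index_documents(batch)
    print(all(r.succeeded for r in results))
except RequestEntityTooLargeError:
    # The documented failure: the batch payload exceeded the service limit.
    # Split the batch into smaller chunks and retry.
    pass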
@@ -597,7 +597,7 @@ def create_skillset(self, skillset: SearchIndexerSkillset, **kwargs: Any) -> Sea
_validate_skillset(skillset)
skillset_gen = skillset._to_generated() if hasattr(skillset, "_to_generated") else skillset

- result = self._client.skillsets.create(skillset_gen, **kwargs)
+ result = self._client.skillsets.create(skillset_gen, **kwargs) # type: ignore
return cast(SearchIndexerSkillset, SearchIndexerSkillset._from_generated(result))

@distributed_trace
@@ -635,7 +635,7 @@ def create_or_update_skillset(

result = self._client.skillsets.create_or_update(
skillset_name=skillset.name,
- skillset=skillset_gen,
+ skillset=skillset_gen, # type: ignore
prefer="return=representation",
error_map=error_map,
skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
@@ -584,7 +584,7 @@ async def create_skillset(self, skillset: SearchIndexerSkillset, **kwargs: Any)
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
skillset_gen = skillset._to_generated() if hasattr(skillset, "_to_generated") else skillset
- result = await self._client.skillsets.create(skillset_gen, **kwargs)
+ result = await self._client.skillsets.create(skillset_gen, **kwargs) # type: ignore
return cast(SearchIndexerSkillset, SearchIndexerSkillset._from_generated(result))

@distributed_trace_async
@@ -621,7 +621,7 @@ async def create_or_update_skillset(

result = await self._client.skillsets.create_or_update(
skillset_name=skillset.name,
- skillset=skillset_gen,
+ skillset=skillset_gen, # type: ignore
prefer="return=representation",
error_map=error_map,
skip_indexer_reset_requirement_for_cache=skip_indexer_reset_requirement_for_cache,
@@ -98,7 +98,7 @@ def __init__(
self.e_tag = e_tag
self.encryption_key = encryption_key

- def _to_generated(self):
+ def _to_generated(self) -> _SearchIndexerSkillset:
generated_skills = []
for skill in self.skills:
if hasattr(skill, "_to_generated"):
@@ -108,7 +108,7 @@ def _to_generated(self):
assert len(generated_skills) == len(self.skills)
encryption_key = getattr(self, "encryption_key", None)
return _SearchIndexerSkillset(
- name=getattr(self, "name", None),
+ name=getattr(self, "name", ""),
description=getattr(self, "description", None),
skills=generated_skills,
cognitive_services_account=getattr(self, "cognitive_services_account", None),
@@ -1153,7 +1153,7 @@ def _from_generated(cls, search_indexer_data_source) -> Optional[Self]:
if not search_indexer_data_source:
return None
connection_string = (
- search_indexer_data_source.credentials.connection_string if search_indexer_data_source.credentials else None
+ search_indexer_data_source.credentials.connection_string if search_indexer_data_source.credentials else ""
)
return cls(
name=search_indexer_data_source.name,