Commit

add docstrings to some base index methods so that they show up in api…
logan-markewich committed May 7, 2024
1 parent 83c8e78 commit 42deb58
Showing 1 changed file with 19 additions and 0 deletions.
llama-index-core/llama_index/core/indices/base.py (19 additions, 0 deletions)
@@ -387,6 +387,11 @@ def as_retriever(self, **kwargs: Any) -> BaseRetriever:
def as_query_engine(
self, llm: Optional[LLMType] = None, **kwargs: Any
) -> BaseQueryEngine:
"""Convert the index to a query engine.
Calls `index.as_retriever(**kwargs)` to get the retriever and then wraps it in a
`RetrieverQueryEngine.from_args(retriever, **kwrags)` call.
"""
# NOTE: lazy import
from llama_index.core.query_engine.retriever_query_engine import (
RetrieverQueryEngine,
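
For context, a minimal usage sketch of the behavior the new docstring describes (not part of the commit): it assumes a VectorStoreIndex built from local files with SimpleDirectoryReader and an OpenAI key in the environment; keyword arguments such as similarity_top_k are forwarded to the retriever, and the remainder to RetrieverQueryEngine.from_args.

from llama_index.core import SimpleDirectoryReader, VectorStoreIndex

# Assumed setup: a small index over local documents in ./data (not from the commit).
documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)

# as_query_engine builds a retriever and wraps it in a RetrieverQueryEngine.
query_engine = index.as_query_engine(similarity_top_k=3)
print(query_engine.query("What does the document say about indexing?"))
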
@@ -411,6 +416,20 @@ def as_chat_engine(
llm: Optional[LLMType] = None,
**kwargs: Any,
) -> BaseChatEngine:
"""Convert the index to a chat engine.
Calls `index.as_query_engine(llm=llm, **kwargs)` to get the query engine and then
wraps it in a chat engine based on the chat mode.
Chat modes:
- `ChatMode.BEST` (default): Chat engine that uses an agent (react or openai) with a query engine tool
- `ChatMode.CONTEXT`: Chat engine that uses a retriever to get context
- `ChatMode.CONDENSE_QUESTION`: Chat engine that condenses questions
- `ChatMode.CONDENSE_PLUS_CONTEXT`: Chat engine that condenses questions and uses a retriever to get context
- `ChatMode.SIMPLE`: Simple chat engine that uses the LLM directly
- `ChatMode.REACT`: Chat engine that uses a react agent with a query engine tool
- `ChatMode.OPENAI`: Chat engine that uses an openai agent with a query engine tool
"""
service_context = kwargs.get("service_context", self.service_context)

if service_context is not None:
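
Likewise, a hedged sketch of as_chat_engine (again not part of the commit), assuming the same VectorStoreIndex setup as above; ChatMode is imported from llama_index.core.chat_engine.types, and the mode values mirror the list in the new docstring.

from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
from llama_index.core.chat_engine.types import ChatMode

# Assumed setup, same as the earlier sketch.
documents = SimpleDirectoryReader("./data").load_data()
index = VectorStoreIndex.from_documents(documents)

# CONDENSE_PLUS_CONTEXT condenses follow-up questions and retrieves context;
# the other modes listed in the docstring (BEST, CONTEXT, SIMPLE, REACT, OPENAI, ...)
# are passed the same way.
chat_engine = index.as_chat_engine(chat_mode=ChatMode.CONDENSE_PLUS_CONTEXT)
print(chat_engine.chat("Summarize the document in two sentences."))

# The chat engine keeps conversation history, so follow-up questions work:
print(chat_engine.chat("What was my previous question about?"))
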
