Skip to content

Commit

Permalink
feat(llama-index): migrate to v0.10.x (#331)
Browse files Browse the repository at this point in the history
Ref: #330
  • Loading branch information
Tomas2D committed Mar 1, 2024
1 parent 9a01a78 commit 59fe675
Show file tree
Hide file tree
Showing 6 changed files with 311 additions and 228 deletions.
3 changes: 1 addition & 2 deletions examples/extensions/llama_index/llama_index_llm.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
"""Use a model through LlamaIndex"""

from dotenv import load_dotenv
from llama_index.llms.base import ChatMessage
from llama_index.llms.types import MessageRole
from llama_index.core.llms import ChatMessage, MessageRole

from genai import Client
from genai.credentials import Credentials
Expand Down
508 changes: 294 additions & 214 deletions poetry.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ langchain-core = { version = "^0.1.0", optional = true }
pyyaml = { version = "^6.0.0", optional = true }
datasets = { version = "^2.13.0", optional = true }
transformers = { version = "^4.33.3", optional = true, extras=["agents"] }
llama-index = { version = "^0.9.15", optional = true }
llama-index-core = { version = "^0.10.0", optional = true }
uvicorn = { version = "^0.22.0", optional = true }
fastapi = { version = "^0.100.0", optional = true }

Expand Down Expand Up @@ -110,7 +110,7 @@ sqlalchemy = "^2.0.25"
[tool.poetry.extras]
langchain = ["langchain-core", "pyyaml"]
huggingface = ["datasets", "transformers"]
llama-index = ["llama-index"]
llama-index = ["llama-index-core"]
localserver = ["uvicorn", "fastapi"]

[tool.pytest.ini_options]
Expand Down
2 changes: 1 addition & 1 deletion src/genai/extensions/llama_index/embeddings.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from genai.text.embedding.embedding_service import CreateExecutionOptions

try:
from llama_index.embeddings.base import BaseEmbedding, Embedding
from llama_index.core.base.embeddings.base import BaseEmbedding, Embedding
except ImportError:
raise ImportError("Could not import llamaindex: Please install ibm-generative-ai[llama-index] extension.") # noqa: B904

Expand Down
11 changes: 5 additions & 6 deletions src/genai/extensions/llama_index/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@


try:
from llama_index.callbacks import CallbackManager
from llama_index.llms.base import (
from llama_index.core.callbacks import CallbackManager
from llama_index.core.llms import (
LLM,
ChatMessage,
ChatResponse,
ChatResponseAsyncGen,
Expand All @@ -34,11 +35,9 @@
CompletionResponseAsyncGen,
CompletionResponseGen,
LLMMetadata,
llm_chat_callback,
llm_completion_callback,
MessageRole,
)
from llama_index.llms.llm import LLM
from llama_index.llms.types import MessageRole
from llama_index.core.llms.callbacks import llm_chat_callback, llm_completion_callback


except ImportError:
Expand Down
11 changes: 8 additions & 3 deletions tests/integration/extensions/test_llama_index.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
import pytest
from llama_index.llms.base import ChatMessage, CompletionResponse
from llama_index.llms.base import ChatResponse as LlamaIndexChatResponse
from llama_index.llms.types import MessageRole
from llama_index.core.llms import (
ChatMessage,
CompletionResponse,
MessageRole,
)
from llama_index.core.llms import (
ChatResponse as LlamaIndexChatResponse,
)

from genai.extensions._common.utils import create_generation_info_from_response
from genai.extensions.llama_index import IBMGenAILlamaIndex
Expand Down

0 comments on commit 59fe675

Please sign in to comment.