Skip to content

Commit

Permalink
Fix Vertex AI import error.
Browse files Browse the repository at this point in the history
  • Loading branch information
eli64s committed Mar 1, 2024
1 parent 4184e2e commit 369af3a
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 7 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "readmeai"
version = "0.5.065"
version = "0.5.066"
description = "👾 Automated README file generator, powered by LLM APIs."
authors = ["Eli <egsalamie@gmail.com>"]
license = "MIT"
Expand Down
2 changes: 1 addition & 1 deletion readmeai/config/settings/config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ quickstart = """
## 🚀 Getting Started
**System Requirements**
**System Requirements:**
* {system_requirements}
Expand Down
14 changes: 9 additions & 5 deletions readmeai/models/vertex.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,10 @@
stop_after_attempt,
wait_exponential,
)
from vertexai.generative_models import GenerationConfig, GenerativeModel
from vertexai.preview.generative_models import (
GenerationConfig,
GenerativeModel,
)

from readmeai.config.settings import ConfigLoader
from readmeai.core.models import BaseModelHandler
Expand All @@ -31,11 +34,11 @@ def __init__(self, config_loader: ConfigLoader) -> None:

def _model_settings(self):
"""Initializes the Vertex AI LLM settings."""
self.location = os.environ.get("VERTEXAI_LOCATION")
self.project_id = os.environ.get("VERTEXAI_PROJECT")
self.temperature = self.config.llm.temperature
self.tokens = self.config.llm.tokens
self.top_p = self.config.llm.top_p
self.location = os.environ.get("VERTEXAI_LOCATION")
self.project_id = os.environ.get("VERTEXAI_PROJECT")
vertexai.init(location=self.location, project=self.project_id)
self.model = GenerativeModel(self.config.llm.model)

Expand Down Expand Up @@ -78,8 +81,9 @@ async def _make_request(
prompt,
generation_config=data,
)
self._logger.info(f"Response for '{index}':\n{response.text}")
return index, clean_response(index, response.text)
response_text = response.text
self._logger.info(f"Response for '{index}':\n{response_text}")
return index, clean_response(index, response_text)

except (
aiohttp.ClientError,
Expand Down

0 comments on commit 369af3a

Please sign in to comment.