Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions examples/.env
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,9 @@ VOYAGE_API_KEY=
REPLICATE_API_KEY=

# For using Ollama
OLLAMA_HOST_URL=
OLLAMA_MODEL=
OLLAMA_HOST_URL=http://localhost:11434
OLLAMA_LLM=llama3.2
OLLAMA_EMBEDDINGS=nomic-embed-text

# For using GPT on Azure
AZURE_OPENAI_BASEURL=
Expand Down
18 changes: 12 additions & 6 deletions examples/ollama/Ollama.md → examples/ollama/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,20 +11,26 @@ To get started with Ollama please check their [Quickstart guide](https://github.
To run the examples you will need to download [Llama 3.2](https://ollama.com/library/llama3.2)
and [nomic-embed-text](https://ollama.com/library/nomic-embed-text) models.

Once models are downloaded you can run them with
You can do this by running the following commands:
```bash
ollama run <model-name>
ollama pull llama3.2
ollama pull nomic-embed-text
```
for example

Then you can start the Ollama server by running:
```bash
ollama run llama3.2
ollama serve
```

#### Configuration
To run Ollama examples you will need to provide a OLLAMA_HOST_URL key in your env.local file.
### Configuration
By default, the examples expect Ollama to be running at `localhost:11434`, but you can customize this — as well as the models to be used — in your `.env.local` file:

For example:
```bash
OLLAMA_HOST_URL=http://localhost:11434
OLLAMA_LLM=llama3.2
OLLAMA_EMBEDDINGS=nomic-embed-text
```

You can find more models in the [Ollama model library](https://ollama.com/library).
2 changes: 1 addition & 1 deletion examples/ollama/chat-llama.php
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama($_SERVER['OLLAMA_MODEL'] ?? '');
$model = new Ollama(env('OLLAMA_LLM'));

$messages = new MessageBag(
Message::forSystem('You are a helpful assistant.'),
Expand Down
2 changes: 1 addition & 1 deletion examples/ollama/embeddings.php
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());

$response = $platform->invoke(new Ollama(Ollama::NOMIC_EMBED_TEXT), <<<TEXT
$response = $platform->invoke(new Ollama(env('OLLAMA_EMBEDDINGS')), <<<TEXT
Once upon a time, there was a country called Japan. It was a beautiful country with a lot of mountains and rivers.
The people of Japan were very kind and hardworking. They loved their country very much and took care of it. The
country was very peaceful and prosperous. The people lived happily ever after.
Expand Down
2 changes: 1 addition & 1 deletion examples/ollama/indexer.php
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$store = new InMemoryStore();
$vectorizer = new Vectorizer($platform, $embeddings = new Ollama(Ollama::NOMIC_EMBED_TEXT), logger());;
$vectorizer = new Vectorizer($platform, $embeddings = new Ollama(env('OLLAMA_EMBEDDINGS')), logger());
$indexer = new Indexer(
loader: new TextFileLoader(),
vectorizer: $vectorizer,
Expand Down
4 changes: 2 additions & 2 deletions examples/ollama/rag.php
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,11 @@

// create embeddings for documents
$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$vectorizer = new Vectorizer($platform, $embeddings = new Ollama(Ollama::NOMIC_EMBED_TEXT), logger());
$vectorizer = new Vectorizer($platform, $embeddings = new Ollama(env('OLLAMA_EMBEDDINGS')), logger());
$indexer = new Indexer(new InMemoryLoader($documents), $vectorizer, $store, logger: logger());
$indexer->index($documents);

$model = new Ollama();
$model = new Ollama(env('OLLAMA_LLM'));

$similaritySearch = new SimilaritySearch($vectorizer, $store);
$toolbox = new Toolbox([$similaritySearch], logger: logger());
Expand Down
2 changes: 1 addition & 1 deletion examples/ollama/stream.php
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama(Ollama::LLAMA_3_2);
$model = new Ollama(env('OLLAMA_LLM'));

$messages = new MessageBag(
Message::forSystem('You are a helpful assistant.'),
Expand Down
2 changes: 1 addition & 1 deletion examples/ollama/structured-output-math.php
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama(Ollama::LLAMA_3_2);
$model = new Ollama(env('OLLAMA_LLM'));

$processor = new AgentProcessor();
$agent = new Agent($platform, $model, [$processor], [$processor], logger: logger());
Expand Down
2 changes: 1 addition & 1 deletion examples/ollama/toolcall.php
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama(Ollama::LLAMA_3_2);
$model = new Ollama(env('OLLAMA_LLM'));

$toolbox = new Toolbox([new Clock()], logger: logger());
$processor = new AgentProcessor($toolbox);
Expand Down
Loading