
Commit c99feba

feat: add with-ollama example
1 parent 46597cf commit c99feba

File tree

5 files changed (+146, −0 lines)


examples/with-ollama/.env.example

Lines changed: 2 additions & 0 deletions
# Ollama host configuration (default points to the local daemon)
OLLAMA_HOST=http://localhost:11434/api

examples/with-ollama/README.md

Lines changed: 51 additions & 0 deletions
# VoltAgent + Ollama Example

This sample shows how VoltAgent uses `ollama-ai-provider-v2` with a Zod-powered tool. The `get_current_weather` tool demonstrates that Ollama receives the JSON Schema definition and calls it with the correct arguments.
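The tool definition below is condensed from `src/index.ts` in this commit; the Zod schema is what gets translated into the JSON Schema that Ollama sees:

```ts
const getCurrentWeather = createTool({
  name: "get_current_weather",
  description: "Fetch the current weather conditions for a given city",
  parameters: z.object({
    location: z.string().describe("City or location to inspect"),
    unit: z.enum(["celsius", "fahrenheit"]).default("celsius"),
  }),
  // Returns canned data; the point is to show the tool being called with typed arguments.
  execute: async ({ location, unit }) => ({
    location,
    temperature: unit === "fahrenheit" ? 72 : 22,
    condition: "Partly cloudy with light winds",
    unit,
  }),
});
```
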
## Requirements

- [Ollama](https://ollama.com/download) must be installed and running.
- Pull a tool-calling capable model such as `llama3.2`:

  ```bash
  ollama pull llama3.2
  ```

- (Optional) Point to a custom Ollama host via `.env`:

  ```env
  OLLAMA_HOST=http://localhost:11434/api
  ```

  The example defaults to `http://localhost:11434/api`.

## Install

```bash
pnpm install
```

> Inside the monorepo you can scope it with `pnpm install --filter voltagent-example-with-ollama...`.

## Run

```bash
pnpm start
```

By default the HTTP server listens on port `3144`. Set the `PORT` environment variable if you need to change it.

### Calling the Agent

Send a request to VoltAgent’s REST API once the server is running:

```bash
curl -X POST http://localhost:3144/agents/ollama-tool-agent/text \
  -H "Content-Type: application/json" \
  -d '{"input":"What is the weather in Madrid right now?"}'
```

You should see:

1. The server response containing the model’s answer.
2. Logs in the console showing `Weather tool invoked`, which confirms that Ollama received the JSON Schema definition and triggered the tool with the expected parameters.

examples/with-ollama/package.json

Lines changed: 32 additions & 0 deletions
{
  "name": "voltagent-example-with-ollama",
  "private": true,
  "type": "module",
  "license": "MIT",
  "keywords": [
    "agent",
    "ai",
    "voltagent",
    "ollama"
  ],
  "scripts": {
    "build": "tsc",
    "dev": "tsx watch --env-file=.env ./src/index.ts",
    "start": "tsx --env-file=.env ./src/index.ts",
    "volt": "volt"
  },
  "dependencies": {
    "@voltagent/cli": "^0.1.15",
    "@voltagent/core": "^1.2.1",
    "@voltagent/logger": "^1.0.4",
    "@voltagent/server-hono": "^1.2.2",
    "ai": "^5.0.76",
    "ollama-ai-provider-v2": "^1.5.3",
    "zod": "^3.25.76"
  },
  "devDependencies": {
    "@types/node": "^24.2.1",
    "tsx": "^4.19.3",
    "typescript": "^5.8.2"
  }
}
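The `dev` and `start` scripts above rely on tsx's `--env-file` flag to load `.env`. A typical local workflow from `examples/with-ollama` (assuming pnpm and Ollama are already installed) might look like this:

```bash
cd examples/with-ollama
cp .env.example .env   # create .env; the dev/start scripts load it via --env-file
pnpm install
pnpm dev               # watch mode via tsx
# or, for a single run:
pnpm start
```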

examples/with-ollama/src/index.ts

Lines changed: 47 additions & 0 deletions
import { Agent, VoltAgent, createTool } from "@voltagent/core";
import { createPinoLogger } from "@voltagent/logger";
import { honoServer } from "@voltagent/server-hono";
import { createOllama } from "ollama-ai-provider-v2";
import { z } from "zod";

const logger = createPinoLogger({
  name: "with-ollama",
  level: "info",
});

// Point the provider at the local Ollama daemon unless OLLAMA_HOST overrides it.
const ollama = createOllama({
  baseURL: process.env.OLLAMA_HOST ?? "http://localhost:11434/api",
});

// Zod-typed tool; the schema is converted to JSON Schema and passed to Ollama.
const getCurrentWeather = createTool({
  name: "get_current_weather",
  description: "Fetch the current weather conditions for a given city",
  parameters: z.object({
    location: z.string().describe("City or location to inspect"),
    unit: z.enum(["celsius", "fahrenheit"]).default("celsius"),
  }),
  execute: async ({ location, unit }) => {
    // Log the call so the console output matches the "Weather tool invoked" check in the README.
    logger.info(`Weather tool invoked for ${location} (${unit})`);
    // Return canned data; a real implementation would query a weather API here.
    return {
      location,
      temperature: unit === "fahrenheit" ? 72 : 22,
      condition: "Partly cloudy with light winds",
      unit,
    };
  },
});

const agent = new Agent({
  name: "ollama-tool-agent",
  instructions: "You are a helpful assistant",
  model: ollama("llama3.2:latest"),
  tools: [getCurrentWeather],
  logger,
});

new VoltAgent({
  agents: {
    agent,
  },
  logger,
  server: honoServer(),
});
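Beyond the REST call shown in the README, the same agent could be exercised in-process. A minimal sketch, assuming the `generateText` method documented for VoltAgent's `Agent` class (this snippet is not part of the commit):

```ts
// Hypothetical quick check: ask the agent directly and let it decide
// whether to call the get_current_weather tool.
const result = await agent.generateText("What is the weather in Madrid right now?");
logger.info(result.text);
```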

examples/with-ollama/tsconfig.json

Lines changed: 14 additions & 0 deletions
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "outDir": "dist",
    "skipLibCheck": true
  },
  "include": ["src"],
  "exclude": ["node_modules", "dist"]
}
