Skip to content

Commit 1ab2c7c

Browse files
committed
feat: implement ollama job runner and chat functionality
1 parent 9987500 commit 1ab2c7c

6 files changed

Lines changed: 123 additions & 2 deletions

File tree

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import { ollama } from "../src";
2+
3+
const models = await ollama().models().run();
4+
5+
console.log(models);
6+
7+
const response = await ollama()
8+
.chat(models[0].name)
9+
.messages([
10+
{
11+
role: "user",
12+
content: "What is the capital of France?",
13+
},
14+
])
15+
.run();
16+
17+
console.log(response);
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
import type { Options } from "~/src/job/schema";
2+
import { ChatBuilder } from "~/src/builder/chat";
3+
import { ModelsBuilder } from "~/src/builder/models";
4+
import { runner } from "~/src/job/ollama";
5+
6+
export function ollama(options?: Options) {
7+
return {
8+
chat(model: string) {
9+
return new ChatBuilder("ollama" as const, options, runner, model);
10+
},
11+
models() {
12+
return new ModelsBuilder("ollama" as const, options, runner);
13+
},
14+
};
15+
}

packages/fluent-ai/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ export * from "./builder/image";
77
export * from "./builder/models";
88
export * from "./builder/openrouter";
99
export * from "./builder/openai";
10+
export * from "./builder/ollama";
1011
export * from "./builder/voyage";
1112
export * from "./builder/fal";
1213

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
import type { ChatJob, ModelsJob } from "~/src/job/schema";
2+
import { createHTTPJob } from "~/src/job/http";
3+
import {
4+
transformToolsToFunctions,
5+
createStreamingGenerator,
6+
} from "~/src/job/utils";
7+
8+
const DEFAULT_BASE_URL = "http://localhost:11434";
9+
10+
function getBaseUrl(options?: ChatJob["options"]): string {
11+
return options?.baseUrl || process.env.OLLAMA_BASE_URL || DEFAULT_BASE_URL;
12+
}
13+
14+
export const runner = {
15+
chat: async (input: ChatJob["input"], options?: ChatJob["options"]) => {
16+
const baseUrl = getBaseUrl(options);
17+
const tools = transformToolsToFunctions(input.tools);
18+
19+
const request = new Request(`${baseUrl}/api/chat`, {
20+
method: "POST",
21+
headers: {
22+
"Content-Type": "application/json",
23+
},
24+
body: JSON.stringify({
25+
model: input.model,
26+
messages: input.messages,
27+
temperature: input.temperature,
28+
tools: tools,
29+
stream: input.stream ?? false,
30+
options: {
31+
num_predict: input.maxTokens,
32+
},
33+
}),
34+
});
35+
36+
return createHTTPJob(request, async (response: Response) => {
37+
if (input.stream) {
38+
return createStreamingGenerator(response);
39+
}
40+
41+
const data = await response.json();
42+
43+
return {
44+
messages: [
45+
{
46+
role: data.message.role,
47+
content: data.message.content,
48+
tool_calls: data.message.tool_calls,
49+
},
50+
],
51+
usage: data.prompt_eval_count
52+
? {
53+
promptTokens: data.prompt_eval_count || 0,
54+
completionTokens: data.eval_count || 0,
55+
totalTokens:
56+
(data.prompt_eval_count || 0) + (data.eval_count || 0),
57+
}
58+
: undefined,
59+
};
60+
});
61+
},
62+
63+
models: async (
64+
input?: ModelsJob["input"],
65+
options?: ModelsJob["options"],
66+
) => {
67+
const baseUrl = getBaseUrl(options);
68+
69+
const request = new Request(`${baseUrl}/api/tags`, {
70+
method: "GET",
71+
headers: {
72+
"Content-Type": "application/json",
73+
},
74+
});
75+
76+
return createHTTPJob(request, async (response: Response) => {
77+
const data = await response.json();
78+
79+
return data.models.map((model: any) => ({
80+
id: model.name,
81+
name: model.name,
82+
}));
83+
});
84+
},
85+
};

packages/fluent-ai/src/job/runner.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import * as openrouter from "~/src/job/openrouter";
22
import * as openai from "~/src/job/openai";
3+
import * as ollama from "~/src/job/ollama";
34
import * as voyage from "~/src/job/voyage";
45
import * as fal from "~/src/job/fal";
56
import type { Job } from "~/src/job/schema";
@@ -22,6 +23,7 @@ export class Runner {
2223
// Registry of provider job runners, keyed by provider name; the Runner
// dispatches each job to the entry matching the job's `provider` field.
export const runner = new Runner({
  openrouter: openrouter.runner,
  openai: openai.runner,
  ollama: ollama.runner,
  voyage: voyage.runner,
  fal: fal.runner,
});

packages/fluent-ai/src/job/schema.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -96,11 +96,12 @@ const modelsOutputSchema = z.object({
9696
// TODO: options schema per provider/job type
9797
// Common per-job provider options. Runners read what they need — e.g. the
// ollama runner resolves `baseUrl` (falling back to OLLAMA_BASE_URL, then
// localhost); hosted providers presumably use `apiKey` — confirm per runner.
const optionsSchema = z.object({
  apiKey: z.string().optional(),
  baseUrl: z.string().optional(),
});
100101

101102
export const chatJobSchema = z.object({
102103
type: z.literal("chat"),
103-
provider: z.enum(["openrouter", "openai"]),
104+
provider: z.enum(["openrouter", "openai", "ollama"]),
104105
options: optionsSchema.optional(),
105106
input: chatInputSchema,
106107
output: chatOutputSchema.optional(),
@@ -116,7 +117,7 @@ export const imageJobSchema = z.object({
116117

117118
export const modelsJobSchema = z.object({
118119
type: z.literal("models"),
119-
provider: z.enum(["openai"]),
120+
provider: z.enum(["openai", "ollama"]),
120121
options: optionsSchema.optional(),
121122
input: modelsInputSchema.optional(),
122123
output: modelsOutputSchema.optional(),

0 commit comments

Comments
 (0)