Skip to content

Commit 7f22073

Browse files
authored
Merge pull request #7811 from uinstinct/ollama-v1
fix(cli): append /v1 to ollama endpoint
2 parents d368529 + c84f2a1 commit 7f22073

File tree

3 files changed

+70
-0
lines changed

3 files changed

+70
-0
lines changed

packages/openai-adapters/src/index.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ import { VertexAIApi } from "./apis/VertexAI.js";
1818
import { WatsonXApi } from "./apis/WatsonX.js";
1919
import { BaseLlmApi } from "./apis/base.js";
2020
import { LLMConfig, OpenAIConfigSchema } from "./types.js";
21+
import { appendPathToUrlIfNotPresent } from "./util/appendPathToUrl.js";
2122

2223
dotenv.config();
2324

@@ -141,6 +142,10 @@ export function constructLlmApi(config: LLMConfig): BaseLlmApi | undefined {
141142
case "lmstudio":
142143
return openAICompatible("http://localhost:1234/", config);
143144
case "ollama":
145+
// for openai compatibility, we need to add /v1 to the end of the url
146+
// this is required for cli (for core, endpoints are overridden by core/llm/llms/Ollama.ts)
147+
if (config.apiBase)
148+
config.apiBase = appendPathToUrlIfNotPresent(config.apiBase, "v1");
144149
return openAICompatible("http://localhost:11434/v1/", config);
145150
case "mock":
146151
return new MockApi();

packages/openai-adapters/src/test/main.test.ts

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -311,4 +311,52 @@ describe("Configuration", () => {
311311
);
312312
expect((azure as OpenAIApi).openai.apiKey).toBe("sk-xxx");
313313
});
314+
315+
describe("ollama api base", () => {
316+
it('should have correct default API base for "ollama"', () => {
317+
const ollama = constructLlmApi({
318+
provider: "ollama",
319+
});
320+
321+
expect((ollama as OpenAIApi).openai.baseURL).toBe(
322+
"http://localhost:11434/v1/",
323+
);
324+
});
325+
it('should append /v1 to apiBase for "ollama"', () => {
326+
const ollama = constructLlmApi({
327+
provider: "ollama",
328+
apiBase: "http://localhost:123",
329+
});
330+
331+
expect((ollama as OpenAIApi).openai.baseURL).toBe(
332+
"http://localhost:123/v1/",
333+
);
334+
});
335+
it("should not reappend /v1 to apibase for ollama if it is already present", () => {
336+
const ollama = constructLlmApi({
337+
provider: "ollama",
338+
apiBase: "http://localhost:123/v1/",
339+
});
340+
341+
expect((ollama as OpenAIApi).openai.baseURL).toBe(
342+
"http://localhost:123/v1/",
343+
);
344+
});
345+
it("should append v1 if apiBase is like myhostv1/", () => {
346+
const ollama = constructLlmApi({
347+
provider: "ollama",
348+
apiBase: "https://myhostv1/",
349+
});
350+
expect((ollama as OpenAIApi).openai.baseURL).toBe("https://myhostv1/v1/");
351+
});
352+
it("should preserve query params and append v1 in apibase", () => {
353+
const ollama = constructLlmApi({
354+
provider: "ollama",
355+
apiBase: "https://test.com:123/ollama-server?x=1",
356+
});
357+
expect((ollama as OpenAIApi).openai.baseURL).toBe(
358+
"https://test.com:123/ollama-server/v1/?x=1",
359+
);
360+
});
361+
});
314362
});
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
export function appendPathToUrlIfNotPresent(
2+
urlString: string,
3+
pathWithoutSlash: string,
4+
) {
5+
const url = new URL(urlString);
6+
if (!url.pathname.endsWith("/")) {
7+
url.pathname += "/";
8+
}
9+
if (!url.pathname.endsWith(pathWithoutSlash + "/")) {
10+
url.pathname += pathWithoutSlash + "/";
11+
}
12+
if (url.search) {
13+
return url.toString();
14+
}
15+
// append slash at the end
16+
return url.toString();
17+
}

0 commit comments

Comments
 (0)