Skip to content

Commit bd990d5

Browse files
jonghwanhyeon and hntrl authored
feat(ollama): add support for custom baseUrl (#9607)
Co-authored-by: Hunter Lovell <40191806+hntrl@users.noreply.github.com>
1 parent 70b0174 commit bd990d5

File tree

4 files changed

+29
-8
lines changed

4 files changed

+29
-8
lines changed

.changeset/serious-grapes-shave.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@langchain/ollama": patch
3+
---
4+
5+
feat(ollama): add support for custom baseUrl

libs/providers/langchain-ollama/src/chat_models.ts

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ import {
3838
JsonOutputParser,
3939
StructuredOutputParser,
4040
} from "@langchain/core/output_parsers";
41+
import { getEnvironmentVariable } from "@langchain/core/utils/env";
4142
import {
4243
InteropZodType,
4344
isInteropZodSchema,
@@ -91,6 +92,7 @@ export interface ChatOllamaInput
9192
model?: string;
9293
/**
9394
* The host URL of the Ollama server.
95+
* Defaults to `OLLAMA_BASE_URL` if set.
9496
* @default "http://127.0.0.1:11434"
9597
*/
9698
baseUrl?: string;
@@ -124,6 +126,7 @@ export interface ChatOllamaInput
124126
*
125127
* ```bash
126128
* npm install @langchain/ollama
129+
* export OLLAMA_BASE_URL="http://127.0.0.1:11434" # Optional; defaults to http://127.0.0.1:11434 if not set
127130
* ```
128131
*
129132
* ## [Constructor args](https://api.js.langchain.com/classes/_langchain_ollama.ChatOllama.html#constructor)
@@ -499,12 +502,16 @@ export class ChatOllama
499502
constructor(fields?: ChatOllamaInput) {
500503
super(fields ?? {});
501504

505+
this.baseUrl =
506+
fields?.baseUrl ??
507+
getEnvironmentVariable("OLLAMA_BASE_URL") ??
508+
this.baseUrl;
509+
502510
this.client = new Ollama({
503511
fetch: fields?.fetch,
504-
host: fields?.baseUrl,
512+
host: this.baseUrl,
505513
headers: fields?.headers,
506514
});
507-
this.baseUrl = fields?.baseUrl ?? this.baseUrl;
508515

509516
this.model = fields?.model ?? this.model;
510517
this.numa = fields?.numa;

libs/providers/langchain-ollama/src/embeddings.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings";
2+
import { getEnvironmentVariable } from "@langchain/core/utils/env";
23
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
34
// @ts-ignore CJS type resolution workaround
45
import { Ollama } from "ollama/browser";
@@ -17,7 +18,8 @@ export interface OllamaEmbeddingsParams extends EmbeddingsParams {
1718
model?: string;
1819

1920
/**
20-
* Base URL of the Ollama server
21+
* Base URL of the Ollama server.
22+
* Defaults to `OLLAMA_BASE_URL` if set.
2123
* @default "http://localhost:11434"
2224
*/
2325
baseUrl?: string;
@@ -76,12 +78,15 @@ export class OllamaEmbeddings extends Embeddings {
7678
constructor(fields?: OllamaEmbeddingsParams) {
7779
super({ maxConcurrency: 1, ...fields });
7880

81+
this.baseUrl =
82+
fields?.baseUrl ??
83+
getEnvironmentVariable("OLLAMA_BASE_URL") ??
84+
this.baseUrl;
7985
this.client = new Ollama({
8086
fetch: fields?.fetch,
81-
host: fields?.baseUrl,
87+
host: this.baseUrl,
8288
headers: fields?.headers ? new Headers(fields.headers) : undefined,
8389
});
84-
this.baseUrl = fields?.baseUrl ?? this.baseUrl;
8590

8691
this.model = fields?.model ?? this.model;
8792
this.dimensions = fields?.dimensions;

libs/providers/langchain-ollama/src/llms.ts

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import type { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
22
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
33
import { GenerationChunk } from "@langchain/core/outputs";
4+
import { getEnvironmentVariable } from "@langchain/core/utils/env";
45
import type { StringWithAutocomplete } from "@langchain/core/utils/types";
56
import { LLM, type BaseLLMParams } from "@langchain/core/language_models/llms";
67
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
@@ -22,6 +23,7 @@ export interface OllamaInput extends BaseLLMParams, OllamaCamelCaseOptions {
2223
* Optionally override the base URL to make request to.
2324
* This should only be set if your Ollama instance is being
2425
 * served from a non-standard location.
26+
* Defaults to `OLLAMA_BASE_URL` if set.
2527
* @default "http://localhost:11434"
2628
*/
2729
baseUrl?: string;
@@ -139,9 +141,11 @@ export class Ollama extends LLM<OllamaCallOptions> implements OllamaInput {
139141
constructor(fields?: OllamaInput & BaseLLMParams) {
140142
super(fields ?? {});
141143
this.model = fields?.model ?? this.model;
142-
this.baseUrl = fields?.baseUrl?.endsWith("/")
143-
? fields?.baseUrl.slice(0, -1)
144-
: fields?.baseUrl ?? this.baseUrl;
144+
this.baseUrl = fields?.baseUrl
145+
? fields?.baseUrl.endsWith("/")
146+
? fields?.baseUrl.slice(0, -1)
147+
: fields?.baseUrl
148+
: getEnvironmentVariable("OLLAMA_BASE_URL") ?? this.baseUrl;
145149
this.client = new OllamaClient({
146150
fetch: fields?.fetch,
147151
host: this.baseUrl,

0 commit comments

Comments
 (0)