
Commit 2f46bd8

Merge pull request #2 from React-ChatBotify-Plugins/feat/provider-debug-flag
feat: Add optional debug flag to LLM providers
2 parents 31e046b + 92cca19 commit 2f46bd8

6 files changed (+46, −0 lines)

src/providers/GeminiProvider.ts

Lines changed: 16 additions & 0 deletions

@@ -14,6 +14,7 @@ class GeminiProvider implements Provider {
     private systemMessage?: string;
     private responseFormat!: 'stream' | 'json';
     private messageParser?: (messages: Message[]) => GeminiProviderMessage[];
+    private debug: boolean = false;
 
     /**
      * Sets default values for the provider based on given configuration. Configuration guide here:
@@ -27,6 +28,7 @@ class GeminiProvider implements Provider {
         this.systemMessage = config.systemMessage;
         this.responseFormat = config.responseFormat ?? 'stream';
         this.messageParser = config.messageParser;
+        this.debug = config.debug ?? false;
         this.headers = {
             'Content-Type': 'application/json',
             Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
@@ -52,6 +54,20 @@ class GeminiProvider implements Provider {
      * @param messages messages to include in the request
      */
     public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
+        if (this.debug) {
+            const sanitizedEndpoint = this.endpoint.replace(/\?key=([^&]+)/, '?key=[REDACTED]');
+            // Headers in Gemini usually don't contain sensitive info like 'Authorization'
+            // as the API key is in the URL, but we'll keep a general sanitization pattern.
+            const sanitizedHeaders = { ...this.headers };
+            // If any sensitive header were to be added in the future, it should be removed here.
+            // delete sanitizedHeaders['Some-Sensitive-Header'];
+            console.log('[GeminiProvider] Request:', {
+                method: this.method,
+                endpoint: sanitizedEndpoint,
+                headers: sanitizedHeaders,
+                body: this.constructBodyWithMessages(messages),
+            });
+        }
         const res = await fetch(this.endpoint, {
             method: this.method,
             headers: this.headers as HeadersInit,
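The endpoint sanitization above is a single regex replace. A minimal standalone sketch of the same pattern (the URL and key below are made up for illustration):

    // Redact an API key passed as a query parameter before logging,
    // mirroring the regex used in GeminiProvider above.
    const redactKey = (endpoint: string): string =>
        endpoint.replace(/\?key=([^&]+)/, '?key=[REDACTED]');

    console.log(redactKey('https://example.com/v1/models/some-model:generateContent?key=abc123'));
    // -> https://example.com/v1/models/some-model:generateContent?key=[REDACTED]

Note that the pattern only matches a key given as the first query parameter; a key passed later in the query string (as &key=...) would not be redacted.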

src/providers/OpenaiProvider.ts

Lines changed: 12 additions & 0 deletions

@@ -14,6 +14,7 @@ class OpenaiProvider implements Provider {
     private systemMessage?: string;
     private responseFormat!: 'stream' | 'json';
     private messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+    private debug: boolean = false;
 
     /**
      * Sets default values for the provider based on given configuration. Configuration guide here:
@@ -27,6 +28,7 @@ class OpenaiProvider implements Provider {
         this.systemMessage = config.systemMessage;
         this.responseFormat = config.responseFormat ?? 'stream';
         this.messageParser = config.messageParser;
+        this.debug = config.debug ?? false;
         this.headers = {
             'Content-Type': 'application/json',
             Accept: this.responseFormat === 'stream' ? 'text/event-stream' : 'application/json',
@@ -54,6 +56,16 @@ class OpenaiProvider implements Provider {
      * @param messages messages to include in the request
      */
     public async *sendMessages(messages: Message[]): AsyncGenerator<string> {
+        if (this.debug) {
+            const sanitizedHeaders = { ...this.headers };
+            delete sanitizedHeaders['Authorization'];
+            console.log('[OpenaiProvider] Request:', {
+                method: this.method,
+                endpoint: this.endpoint,
+                headers: sanitizedHeaders,
+                body: this.constructBodyWithMessages(messages),
+            });
+        }
         const res = await fetch(this.endpoint, {
             method: this.method,
             headers: this.headers as HeadersInit,
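Here the sensitive value lives in the headers rather than the URL, so the log sanitizes a shallow copy and drops the Authorization header. A standalone sketch of the same technique (the header values are placeholders, not real credentials):

    // Shallow-copy the headers, then strip the bearer token before logging.
    const headers: Record<string, string> = {
        'Content-Type': 'application/json',
        Authorization: 'Bearer sk-placeholder', // made-up value for illustration
    };

    const sanitizedHeaders = { ...headers };
    delete sanitizedHeaders['Authorization'];
    console.log(sanitizedHeaders); // { 'Content-Type': 'application/json' }

A shallow copy is sufficient because header values are plain strings, and the actual request still sends the original this.headers untouched.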

src/providers/WebLlmProvider.ts

Lines changed: 13 additions & 0 deletions

@@ -15,6 +15,7 @@ class WebLlmProvider implements Provider {
     private chatCompletionOptions: Record<string, unknown>;
     private messageParser?: (messages: Message[]) => WebLlmProviderMessage[];
     private engine?: MLCEngine;
+    private debug: boolean = false;
 
     /**
      * Sets default values for the provider based on given configuration. Configuration guide here:
@@ -29,6 +30,7 @@ class WebLlmProvider implements Provider {
         this.messageParser = config.messageParser;
         this.engineConfig = config.engineConfig ?? {};
         this.chatCompletionOptions = config.chatCompletionOptions ?? {};
+        this.debug = config.debug ?? false;
         this.createEngine();
     }
 
@@ -52,6 +54,17 @@ class WebLlmProvider implements Provider {
             await this.createEngine();
         }
 
+        if (this.debug) {
+            console.log('[WebLlmProvider] Request:', {
+                model: this.model,
+                systemMessage: this.systemMessage,
+                responseFormat: this.responseFormat,
+                engineConfig: this.engineConfig,
+                chatCompletionOptions: this.chatCompletionOptions,
+                messages: this.constructBodyWithMessages(messages).messages, // Log messages being sent
+            });
+        }
+
         const result = await this.engine?.chat.completions.create(this.constructBodyWithMessages(messages));
         if (result && Symbol.asyncIterator in result) {
             for await (const chunk of result as AsyncIterable<ChatCompletionChunk>) {
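WebLLM runs entirely in the browser, so there is no endpoint or key to redact and the debug log can dump the full configuration. A usage sketch, assuming the class is the module's default export (the import path and model id below are assumptions, not taken from the diff):

    import WebLlmProvider from './providers/WebLlmProvider';

    // With debug enabled, each sendMessages call logs the model, system message,
    // response format, engine/completion options, and outgoing messages.
    const provider = new WebLlmProvider({
        model: 'Llama-3.2-1B-Instruct-q4f16_1-MLC', // illustrative model id
        debug: true,
    });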

src/types/provider-config/GeminiProviderConfig.ts

Lines changed: 2 additions & 0 deletions

@@ -15,6 +15,7 @@ type DirectConfig = {
     headers?: Record<string, string>;
     body?: Record<string, string>;
     messageParser?: (messages: Message[]) => GeminiProviderMessage[];
+    debug?: boolean;
 };
 
 /**
@@ -30,6 +31,7 @@ type ProxyConfig = {
     headers?: Record<string, string>;
     body?: Record<string, string>;
     messageParser?: (messages: Message[]) => GeminiProviderMessage[];
+    debug?: boolean;
 };
 
 /**

src/types/provider-config/OpenaiProviderConfig.ts

Lines changed: 2 additions & 0 deletions

@@ -15,6 +15,7 @@ type DirectConfig = {
     headers?: Record<string, string>;
     body?: Record<string, string>;
     messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+    debug?: boolean;
 };
 
 /**
@@ -30,6 +31,7 @@ type ProxyConfig = {
     headers?: Record<string, string>;
     body?: Record<string, string>;
     messageParser?: (messages: Message[]) => OpenaiProviderMessage[];
+    debug?: boolean;
 };
 
 /**

src/types/provider-config/WebLlmProviderConfig.ts

Lines changed: 1 addition & 0 deletions

@@ -12,6 +12,7 @@ type WebLlmProviderConfig = {
     engineConfig?: MLCEngineConfig;
     chatCompletionOptions?: Record<string, unknown>;
     messageParser?: (messages: Message[]) => WebLlmProviderMessage[];
+    debug?: boolean;
 };
 
 export type { WebLlmProviderConfig };
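In every provider the flag is read with nullish coalescing (config.debug ?? false), so existing configs that omit it keep logging off. A minimal illustration of the defaulting behavior, using a hypothetical stand-in for the config types above:

    // Hypothetical config type; the real ones add debug?: boolean alongside their other fields.
    type ExampleConfig = { debug?: boolean };

    const resolveDebug = (config: ExampleConfig): boolean => config.debug ?? false;

    console.log(resolveDebug({}));              // false: flag omitted, logging stays off
    console.log(resolveDebug({ debug: true })); // true: logging enabled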
