Skip to content

Commit dff745e

Browse files
committed
memory examples + working document QA
1 parent c225892 commit dff745e

23 files changed

+241
-368
lines changed

client/Chat.jsx

Lines changed: 20 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@ const Chat = () => {
2323
const [loading, setLoading] = useState(false);
2424
const [answer, setAnswer] = useState('');
2525
const [error, setError] = useState('');
26-
const [activeTab, setActiveTab] = useState('chat');
2726
const [selectedFile, setSelectedFile] = useState(null);
2827

2928
const handleInputChange = (event) => {
@@ -39,7 +38,8 @@ const Chat = () => {
3938

4039
try {
4140
setLoading(true);
42-
const { response } = await chatServices.chatWithLLM({ userInput });
41+
const response = await chatServices.chatWithLLM({ userInput });
42+
console.log({response})
4343
setAnswer(response);
4444
} catch (err) {
4545
setError(err);
@@ -53,12 +53,24 @@ const Chat = () => {
5353
setSelectedFile(event.target.files[0]);
5454
}
5555

56-
const handleFileUpload = (event) => {
56+
// Uploads the currently selected file to the ingestion endpoint so its
// contents can be queried through the chat. No-op when nothing is selected.
const handleFileUpload = async () => {
  // Guard clause: nothing chosen in the file input yet.
  if (!selectedFile) return;

  try {
    setLoading(true);

    const form = new FormData();
    form.append('chat-file', selectedFile);

    const { success } = await chatServices.ingestFile({ fileInput: form });
    if (success) {
      setAnswer('Successfully ingested. Ask me anything.');
    }
  } catch (err) {
    // Surface the failure to the user instead of silently logging it —
    // consistent with the chat submit handler, which calls setError(err).
    setError(err);
  } finally {
    setLoading(false);
  }
};
6476

@@ -83,9 +95,9 @@ const Chat = () => {
8395
</div>
8496

8597
<div style={{ marginTop: '2rem', display: 'flex', flexDirection: 'column', alignItems: 'center' }}>
86-
<input accept="image/*" id="file-input" type="file" onChange={handleFileChange}/>
98+
<input accept=".pdf,.txt,.csv" id="file-input" type="file" onChange={handleFileChange}/>
8799
{selectedFile && (
88-
<Button>
100+
<Button onClick={handleFileUpload}>
89101
Upload
90102
</Button>
91103
)}

client/services/chat-services.js

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,20 @@ const chatWithLLM = async ({ userInput }) => {
55
return data;
66
};
77

8+
/**
 * Sends a file (wrapped in FormData under the 'chat-file' key) to the
 * server's ingestion endpoint.
 *
 * @param {{ fileInput: FormData }} params - form data carrying the file.
 * @returns {Promise<Object>} response body, e.g. `{ success: boolean }`.
 */
const ingestFile = async ({ fileInput }) => {
  // Ingestion (parsing + embedding) can be slow, so allow up to 2 minutes.
  //
  // Do NOT set the Content-Type header by hand: the original header was
  // malformed ("multipart/form-data: boundary=..." — HTTP requires a
  // semicolon, not a colon) and browser FormData has no `_boundary`
  // property, so it rendered as "boundary=undefined". HTTP clients set
  // the correct multipart header, boundary included, automatically when
  // given a FormData body.
  const { data } = await requestClient.post('/chat/ingest', fileInput, {
    timeout: 120000,
  });
  return data;
};
18+
819
// Public surface of the chat service layer.
const chatServices = {
  chatWithLLM,
  ingestFile,
};

export { chatServices };

examples/bufferMemory.js

Lines changed: 0 additions & 35 deletions
This file was deleted.

examples/bufferWindowMemory.js

Lines changed: 0 additions & 35 deletions
This file was deleted.

examples/conversationSummaryMemory.js

Lines changed: 0 additions & 35 deletions
This file was deleted.

examples/entityMemory.js

Lines changed: 0 additions & 35 deletions
This file was deleted.

examples/memory/bufferMemory.js

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { BufferMemory } from 'langchain/memory';

// NOTE: placeholder for a runnable example only — never commit real keys;
// prefer supplying OPENAI_API_KEY via the environment.
process.env.OPENAI_API_KEY = 'YOUR API KEY';

/**
 * Demonstrates BufferMemory: the full, unsummarized conversation history is
 * replayed into the prompt through the 'history' messages placeholder.
 *
 * @returns {Promise<Object>} the chain result for the single user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new BufferMemory({ returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Don't leave the top-level promise floating — a rejection would otherwise
// surface only as an unhandled-rejection warning (or crash newer Node).
run().catch(console.error);
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { BufferWindowMemory } from 'langchain/memory';

// NOTE: placeholder for a runnable example only — never commit real keys;
// prefer supplying OPENAI_API_KEY via the environment.
process.env.OPENAI_API_KEY = 'YOUR API KEY';

/**
 * Demonstrates BufferWindowMemory: only the last k exchanges (here k = 5)
 * are replayed into the prompt's 'history' placeholder, bounding prompt size.
 *
 * @returns {Promise<Object>} the chain result for the single user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new BufferWindowMemory({ returnMessages: true, k: 5 });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Don't leave the top-level promise floating — a rejection would otherwise
// surface only as an unhandled-rejection warning (or crash newer Node).
run().catch(console.error);
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { ConversationSummaryMemory } from 'langchain/memory';

// NOTE: placeholder for a runnable example only — never commit real keys;
// prefer supplying OPENAI_API_KEY via the environment.
process.env.OPENAI_API_KEY = 'YOUR API KEY';

/**
 * Demonstrates ConversationSummaryMemory: instead of replaying raw turns,
 * the LLM itself summarizes the running conversation, and that summary is
 * injected via the 'history' placeholder.
 *
 * @returns {Promise<Object>} the chain result for the single user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new ConversationSummaryMemory({ llm, returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Don't leave the top-level promise floating — a rejection would otherwise
// surface only as an unhandled-rejection warning (or crash newer Node).
run().catch(console.error);

examples/memory/entityMemory.js

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
import { ConversationChain } from 'langchain/chains';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts';
import { EntityMemory } from 'langchain/memory';

// NOTE: placeholder for a runnable example only — never commit real keys;
// prefer supplying OPENAI_API_KEY via the environment.
process.env.OPENAI_API_KEY = 'YOUR API KEY';

/**
 * Demonstrates EntityMemory: the LLM extracts and tracks entities mentioned
 * in the conversation, which are injected via the 'history' placeholder.
 *
 * Declared async for consistency with the sibling memory examples
 * (bufferMemory, bufferWindowMemory, conversationSummaryMemory); callers
 * still receive the same promise.
 *
 * @returns {Promise<Object>} the chain result for the single user turn.
 */
const run = async () => {
  const llm = new ChatOpenAI({ temperature: 0, verbose: true });
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate('The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.'),
    new MessagesPlaceholder('history'),
    HumanMessagePromptTemplate.fromTemplate('{input}'),
  ]);

  const memory = new EntityMemory({ llm, returnMessages: true });
  const userInput = 'Hi I\'m a human';

  const chain = new ConversationChain({
    memory,
    prompt,
    llm,
  });

  return chain.call({
    input: userInput,
  });
};

// Don't leave the top-level promise floating — a rejection would otherwise
// surface only as an unhandled-rejection warning (or crash newer Node).
run().catch(console.error);

0 commit comments

Comments
 (0)