| 1 | +import path from 'path'; |
| 2 | +import { HNSWLib } from "langchain/vectorstores/hnswlib"; |
| 3 | +import { RetrievalQAChain } from 'langchain/chains'; |
| 4 | +import { OpenAIEmbeddings } from "langchain/embeddings/openai"; |
| 5 | +import { ContextualCompressionRetriever } from "langchain/retrievers/contextual_compression"; |
| 6 | +import { LLMChainExtractor } from "langchain/retrievers/document_compressors/chain_extract"; |
1 | 7 | import { ConversationChain } from 'langchain/chains'; |
2 | 8 | import { PromptTemplate } from 'langchain/prompts'; |
3 | 9 | import { ConversationSummaryMemory } from 'langchain/memory'; |
4 | 10 | import { OpenAI } from 'langchain/llms/openai'; |
| 11 | +import { getFileLoader } from '../utils/documentLoader.js'; |
5 | 12 |
6 | 13 | class OpenAiService { |
7 | 14 | constructor () { |
@@ -35,14 +42,42 @@ class OpenAiService { |
35 | 42 | return { chain, inputType: 'input', responseType: 'response' }; |
36 | 43 | } |
37 | 44 |
| 45 | + async ingestFile(data) { |
| 46 | + const { files } = data; |
| 47 | + const { originalFilename, filepath } = files['chat-file']; |
| 48 | + const fileExtension = path.extname(originalFilename); |
| 49 | + |
| 50 | + const loader = getFileLoader(fileExtension, filepath); |
| 51 | + if (!loader) { |
| 52 | +      throw new Error(`Unsupported file type: ${fileExtension}`); |
| 53 | + } |
| 54 | + |
| 55 | + const docs = await loader.load(); |
| 56 | +    this.vectorStore = await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()); |
| 57 | + |
| 58 | +    // Compress retrieved chunks so only passages relevant to the query reach the LLM. |
| 59 | +    const baseCompressor = LLMChainExtractor.fromLLM(this.model); |
| 60 | + this.retriever = new ContextualCompressionRetriever({ |
| 61 | + baseCompressor, |
| 62 | + baseRetriever: this.vectorStore.asRetriever(), |
| 63 | + }); |
| 64 | + |
| 65 | + this.chain = RetrievalQAChain.fromLLM( |
| 66 | + this.model, |
| 67 | + this.retriever, |
| 68 | + { returnSourceDocuments: true } |
| 69 | + ); |
| 70 | + return { success: true }; |
| 71 | + } |
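
For reference, the getFileLoader helper imported from ../utils/documentLoader.js is not part of this diff. A minimal sketch of what it might look like, assuming LangChain's stock filesystem loaders and a hypothetical extension map (the supported extensions are an assumption, not the author's actual list); it returns null for unknown extensions, which ingestFile above treats as an error:

    // Hypothetical sketch of ../utils/documentLoader.js -- this helper is not shown in the PR.
    // Maps a file extension to a LangChain filesystem document loader.
    import { TextLoader } from 'langchain/document_loaders/fs/text';
    import { CSVLoader } from 'langchain/document_loaders/fs/csv';
    import { PDFLoader } from 'langchain/document_loaders/fs/pdf'; // needs the pdf-parse peer dependency

    export const getFileLoader = (fileExtension, filepath) => {
      switch (fileExtension.toLowerCase()) {
        case '.txt':
        case '.md':
          return new TextLoader(filepath);
        case '.csv':
          return new CSVLoader(filepath);
        case '.pdf':
          return new PDFLoader(filepath);
        default:
          return null; // ingestFile throws for unsupported extensions
      }
    };
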
| 72 | + |
38 | 73 | call = async (userInput) => { |
39 | 74 | const { chain, inputType, responseType } = this.assembleChain(); |
40 | 75 |
41 | 76 | const { [responseType]: response } = await chain.call({ |
42 | 77 | [inputType]: userInput, |
43 | 78 | }); |
44 | 79 |
45 | | - return response; |
| 80 | + return { response }; |
46 | 81 | } |
47 | 82 | } |
48 | 83 |
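
A quick end-to-end usage sketch, assuming the class is exported and uploads are parsed with formidable v2, whose file objects expose the originalFilename and filepath fields destructured in ingestFile; the Express routes and everything except the 'chat-file' field name are assumptions:

    // Hypothetical wiring -- assumes a default export (e.g. `export default OpenAiService`)
    // and an OPENAI_API_KEY in the environment for OpenAI and OpenAIEmbeddings.
    import express from 'express';
    import formidable from 'formidable';
    import OpenAiService from './services/openAiService.js';

    const app = express();
    app.use(express.json());
    const service = new OpenAiService();

    // Index an uploaded document; the multipart field name must be 'chat-file'.
    app.post('/api/upload', (req, res) => {
      formidable().parse(req, async (err, fields, files) => {
        if (err) return res.status(400).json({ error: err.message });
        try {
          res.json(await service.ingestFile({ files })); // { success: true }
        } catch (e) {
          res.status(415).json({ error: e.message });
        }
      });
    });

    // Chat endpoint; after an upload, answers can be grounded in the ingested file.
    app.post('/api/chat', async (req, res) => {
      res.json(await service.call(req.body.message)); // { response }
    });

    app.listen(3000);

One caveat: RetrievalQAChain expects its input under the 'query' key and returns the answer under 'text' (plus 'sourceDocuments' when returnSourceDocuments is set), not the 'input'/'response' pair shown on line 42, so the elided body of assembleChain presumably switches inputType and responseType to those keys once ingestFile has replaced this.chain.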