-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathquery.js
More file actions
107 lines (79 loc) · 2.72 KB
/
query.js
File metadata and controls
107 lines (79 loc) · 2.72 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import * as dotenv from 'dotenv';
dotenv.config();
import readlineSync from 'readline-sync';
import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai';
import { Pinecone } from '@pinecone-database/pinecone';
import { GoogleGenAI } from "@google/genai";
// Gemini client; reads GEMINI_API_KEY / GOOGLE_API_KEY from the environment
// loaded by dotenv above — NOTE(review): assumes the key env var is set; verify.
const ai = new GoogleGenAI({});
// Shared multi-turn chat history ({role, parts} entries) that both
// transformQuery() and chatting() read and mutate for session context.
const History = []
// FOR ENHANCED CONTEXTUAL SESSION MANAGED QUERY MANAGEMENT
/**
 * Rewrites a follow-up question into a complete, standalone question by
 * showing the model the accumulated chat History plus the new question.
 *
 * @param {string} question - The raw (possibly context-dependent) user question.
 * @returns {Promise<string>} The rewritten, self-contained question text.
 * @throws Propagates any error from the Gemini API call.
 */
async function transformQuery(question) {
  // Temporarily append the raw question so the model sees it in context.
  History.push({
    role: 'user',
    parts: [{ text: question }],
  });
  try {
    const response = await ai.models.generateContent({
      model: 'gemini-2.0-flash',
      contents: History,
      config: {
        systemInstruction: `You are a query rewriting expert. Based on the provided chat history, rephrase the "Follow Up user Question" into a complete, standalone question that can be understood without the chat history.
Only output the rewritten question and nothing else.
`,
      },
    });
    return response.text;
  } finally {
    // Always remove the temporary entry — even when the API call throws —
    // so a failed rewrite cannot leave History in a corrupted state.
    History.pop();
  }
}
/**
 * Answers one user question via retrieval-augmented generation:
 * rewrites the question, embeds it, retrieves the top-10 similar chunks
 * from Pinecone, and asks Gemini to answer from that context only.
 * Appends the user turn and the model turn to the shared History.
 *
 * @param {string} question - The user's (possibly follow-up) question.
 * @returns {Promise<void>} Prints the model's answer to stdout.
 * @throws Propagates embedding / Pinecone / Gemini errors after rolling
 *         back the user turn pushed onto History.
 */
async function chatting(question) {
  // GET ACTUAL QUESTION USING PREVIOUS STATE SAVED MESSAGES (IF ANY)
  const queries = await transformQuery(question);

  // Embed the standalone question for semantic search.
  const embeddings = new GoogleGenerativeAIEmbeddings({
    apiKey: process.env.GEMINI_API_KEY,
    model: 'text-embedding-004',
  });
  const queryVector = await embeddings.embedQuery(queries);

  // MAKE CONNECTION FROM PINCONE
  const pinecone = new Pinecone();
  const pineconeIndex = pinecone.Index(process.env.PINECONE_INDEX_NAME);
  const searchResults = await pineconeIndex.query({
    topK: 10,
    vector: queryVector,
    includeMetadata: true,
  });

  // EXTRACT TOP 10 SEMANTIC SIMILAR CHUNKS
  // Optional chaining guards against matches whose metadata is absent,
  // which would otherwise throw and abort the whole turn.
  const context = searchResults.matches
    .map((match) => match.metadata?.text ?? '')
    .join("\n\n---\n\n");

  // CREATE A CONTEXTUAL PROMPT REQUEST
  History.push({
    role: 'user',
    parts: [{ text: queries }],
  });
  let response;
  try {
    response = await ai.models.generateContent({
      model: 'gemini-2.0-flash',
      contents: History,
      config: {
        systemInstruction: `You have to behave like a Data Structure and Algorithm Expert.
You will be given a context of relevant information and a user question.
Your task is to answer the user's question based ONLY on the provided context.
If the answer is not in the context, you must say "I could not find the answer in the provided document."
Keep your answers clear, concise, and educational.
Context: ${context}
`,
      },
    });
  } catch (err) {
    // Roll back the user turn so a failed call does not corrupt History.
    History.pop();
    throw err;
  }
  History.push({
    role: 'model',
    parts: [{ text: response.text }],
  });
  console.log("\n");
  console.log(response.text);
}
// MAIN FUNCTION
/**
 * Interactive REPL: prompts the user, answers via chatting(), repeats.
 *
 * Uses an iterative loop instead of the original unawaited recursive call,
 * which built an unbounded floating-promise chain and let any rejection
 * escape as an unhandled rejection. A failed turn is now reported and the
 * loop continues instead of killing the session.
 */
async function main() {
  for (;;) {
    const userProblem = readlineSync.question("Ask me anything--> ");
    try {
      await chatting(userProblem);
    } catch (err) {
      // Keep the REPL alive; surface the failure for this turn only.
      console.error('Error answering question:', err?.message ?? err);
    }
  }
}
main();