Merge pull request #31 from ChiragAgg5k/chore-improve-system-prompt
chore: shifted system prompt to prefixMessages
loks0n authored Jan 20, 2025
2 parents 7963089 + 81379c8 commit 2229506
Showing 3 changed files with 40 additions and 13 deletions.
16 changes: 15 additions & 1 deletion src/documents.js
@@ -3,6 +3,9 @@ import { readFile } from "fs/promises";
 import { Document } from "langchain/document";
 import { MarkdownTextSplitter } from "langchain/text_splitter";

+/**
+ * @returns {Promise<Document[]>}
+ */
 const getDocumentation = async () => {
   const filenames = await glob([
     "./sources/website/src/routes/docs/**/*.markdoc",
@@ -36,6 +39,9 @@ const getDocumentation = async () => {
   );
 };

+/**
+ * @returns {Promise<Document[]>} Array of Document objects containing processed references
+ */
 const getReferences = async () => {
   const filenames = await glob(["./sources/references/**/*.md"]);

@@ -64,7 +70,7 @@ export const getDocuments = async () => {
   return await splitDocuments([...documentation, ...references]);
 };

-/**x
+/**
  * @param {Document[]} documents
  * @returns {Promise<Document<Record<string, any>>[]>}
  */
@@ -80,6 +86,10 @@ async function splitDocuments(documents) {
   return await splitter.createDocuments(texts, metadatas);
 }

+/**
+ * @param {string} contents
+ * @returns {Object.<string, string>}
+ */
 function parseMarkdownFrontmatter(contents) {
   const raw = contents.match(/^---\n([\s\S]*?)\n---/);
   if (!raw) {
@@ -94,6 +104,10 @@
   return frontmatter;
 }

+/**
+ * @param {string} filename
+ * @returns {{sdk: string, service: string}}
+ */
 function parseReferenceData(filename) {
   const [sdk, service] = filename
     .replace("sources/references/", "")
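The documents.js changes only add JSDoc to existing helpers. As a rough, hypothetical illustration of what the newly annotated parseMarkdownFrontmatter is expected to produce (the key/value parsing between the frontmatter fences is not shown in this diff, so the exact behavior is an assumption; the helper is also module-private to src/documents.js):

// Sketch only, not part of the commit. Assumes the helper splits "key: value"
// pairs between the `---` fences, as its new @returns type suggests.
const contents = [
  "---",
  "title: Databases",
  "description: Store and query structured data",
  "---",
  "# Databases",
].join("\n");

// Expected shape per the new JSDoc: Object.<string, string>, e.g.
// { title: "Databases", description: "Store and query structured data" }
const frontmatter = parseMarkdownFrontmatter(contents);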
23 changes: 20 additions & 3 deletions src/embeddings.js
@@ -4,6 +4,9 @@ import { OpenAIChat } from "langchain/llms/openai";
 import { loadQAStuffChain } from "langchain/chains";
 import { getDocuments } from "./documents.js";

+/**
+ * @returns {Promise<VectorStoreRetriever<HNSWLib>>}
+ */
 export const intializeDocumentRetriever = async () => {
   const embeddings = new OpenAIEmbeddings({
     openAIApiKey: process.env._APP_ASSISTANT_OPENAI_API_KEY,
@@ -15,7 +18,11 @@ export const intializeDocumentRetriever = async () => {
   return vectorStore.asRetriever(5);
 };

-export const getOpenAIChat = async (onToken) =>
+/**
+ * @param {function} onToken
+ * @param {string} systemPrompt
+ */
+export const getOpenAIChat = async (onToken, systemPrompt) =>
   new OpenAIChat({
     modelName: "gpt-4o",
     openAIApiKey: process.env._APP_ASSISTANT_OPENAI_API_KEY,
@@ -27,8 +34,18 @@ export const getOpenAIChat = async (onToken) =>
         handleLLMNewToken: onToken,
       },
     ],
+    prefixMessages: [
+      {
+        role: "system",
+        content: systemPrompt,
+      },
+    ],
   });

-export const getRagChain = async (onToken) => {
-  return loadQAStuffChain(await getOpenAIChat(onToken));
+/**
+ * @param {function} onToken
+ * @param {string} systemPrompt
+ */
+export const getRagChain = async (onToken, systemPrompt) => {
+  return loadQAStuffChain(await getOpenAIChat(onToken, systemPrompt));
 };
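With this change the system prompt is attached once, as a prefixed "system" message via prefixMessages, instead of being concatenated into every question. A minimal usage sketch of the new signatures (the prompt strings and the stdout handler are made up for illustration; the call methods match how main.js uses these objects):

// Sketch only: stream tokens while the system prompt rides along as a
// prefixed system message rather than being prepended to the user prompt.
const chat = await getOpenAIChat(
  (token) => process.stdout.write(token),
  "You are the Appwrite docs assistant. Answer concisely."
);
await chat.call("How do I create a collection?");

// The RAG chain forwards the same system prompt to the underlying OpenAIChat,
// so the question passed to chain.call() can stay plain.
const chain = await getRagChain(
  (token) => process.stdout.write(token),
  "You are the Appwrite docs assistant."
);
await chain.call({ input_documents: relevantDocuments, question: prompt });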
14 changes: 5 additions & 9 deletions src/main.js
@@ -30,7 +30,6 @@ app.post("/v1/models/assistant/prompt", async (req, res) => {
     return;
   }

-  // raw to text
   const decoder = new TextDecoder();
   const text = decoder.decode(req.body);

@@ -41,11 +40,11 @@ app.post("/v1/models/assistant/prompt", async (req, res) => {

   const chain = await getRagChain((token) => {
     res.write(token);
-  });
+  }, systemPrompt);

   await chain.call({
     input_documents: relevantDocuments,
-    question: `${systemPrompt}\n\n${prompt}`,
+    question: prompt,
   });

   const sources = new Set(
@@ -71,14 +70,11 @@ app.post("/v1/models/generic/prompt", async (req, res) => {
   let { prompt, systemPrompt } = JSON.parse(text);
   systemPrompt ??= SYSTEM_PROMPT;

-  const chain = await getOpenAIChat((token) => {
+  const chat = await getOpenAIChat((token) => {
     res.write(token);
-  });
+  }, systemPrompt);

-  await chain.invoke([
-    ["system", systemPrompt],
-    ["human", prompt],
-  ])
+  await chat.call(prompt);

   res.end();
 });
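From a client's point of view the request shape is unchanged: both endpoints still accept a JSON body with prompt and an optional systemPrompt, and stream tokens back in the response. A rough sketch of calling the generic endpoint (the host and port are assumptions, not shown in this diff; assumes a Node 18+ runtime where fetch and async iteration over the response body are available):

// Hypothetical client call; adjust the URL to wherever the assistant service runs.
const res = await fetch("http://localhost:3003/v1/models/generic/prompt", {
  method: "POST",
  body: JSON.stringify({
    prompt: "How do I create a document?",
    // Optional: the server falls back to its built-in SYSTEM_PROMPT when omitted.
    systemPrompt: "Answer using only the Appwrite documentation.",
  }),
});

// Tokens are written to the response as they are generated, so read the body as a stream.
const decoder = new TextDecoder();
for await (const chunk of res.body) {
  process.stdout.write(decoder.decode(chunk, { stream: true }));
}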
