Log all chats if you configure it
SpikePuppet committed Jul 2, 2023
1 parent c0f56fd commit d32a5c9
Showing 3 changed files with 64 additions and 5 deletions.
16 changes: 15 additions & 1 deletion src/configure.ts
@@ -6,6 +6,7 @@ import { ConfigurationPrompt } from "./types";

export const CONFIG_FOLDER = `${os.homedir()}/.ducky`;
export const CONFIG_FILE_LOCATION: string = `${CONFIG_FOLDER}/config.json`;
export const LOG_FILE_LOCATION: string = `${CONFIG_FOLDER}/chat`;

export async function configure() {
const { prompt } = enquirer;
@@ -23,15 +23,28 @@ export async function configure() {
message: "What is your OpenAI API key?",
name: "openAiApiKey",
});
console.log(chalk.green("Awesome!"));
const logging: ConfigurationPrompt = await prompt({
type: "input",
message: "Do you want to log your chats (y/n)?",
name: "logChat",
});

try {
if (!fs.existsSync(CONFIG_FOLDER)) {
fs.mkdirSync(CONFIG_FOLDER);
}

if (!fs.existsSync(LOG_FILE_LOCATION)) {
fs.mkdirSync(LOG_FILE_LOCATION);
}

fs.writeFileSync(
CONFIG_FILE_LOCATION,
JSON.stringify({ OPEN_AI_API_KEY: apiKey.openAiApiKey }),
JSON.stringify({
OPEN_AI_API_KEY: apiKey.openAiApiKey,
LOG_CHAT: logging.logChat === "y" ? true : false,
}),
{ flag: "w+", encoding: "utf-8" },
);
} catch (err) {
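
For reference, a minimal sketch (not part of the commit) of reading back the file that configure() writes above. The path and key names come from the diff; the surrounding script is illustrative only.

import fs from "fs";
import os from "os";

// Shape of ~/.ducky/config.json as written by configure() above.
interface DuckyConfig {
  OPEN_AI_API_KEY: string;
  LOG_CHAT: boolean;
}

const configPath = `${os.homedir()}/.ducky/config.json`;

// Read the file back; LOG_CHAT is true when the prompt was answered with "y".
const config = JSON.parse(fs.readFileSync(configPath, "utf-8")) as DuckyConfig;
console.log(config.LOG_CHAT);
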
51 changes: 48 additions & 3 deletions src/interactivePrompt.ts
@@ -10,11 +10,20 @@ import {
import { SystemMessagePromptTemplate } from "langchain/prompts";
import { BufferMemory } from "langchain/memory";
import { UserChatPrompt } from "./types";
import fs from "fs";
import { CONFIG_FILE_LOCATION, LOG_FILE_LOCATION } from "./configure.js";
import { exit } from "process";

export async function interactivePrompt() {
const openAIApiKey = process.env.OPENAI_API_KEY;
interface ConfigurationOptions {
OPEN_AI_API_KEY: string;
LOG_CHAT: boolean;
}

export async function interactivePrompt(): Promise<void> {
const logFileName = `ducky-${getCurrentUnixTimestamp()}`; // ".log" is appended where the file is written
const config = await loadConfig();
const model = new ChatOpenAI({
openAIApiKey,
openAIApiKey: config.OPEN_AI_API_KEY,
modelName: "gpt-3.5-turbo-0613",
temperature: 0.1,
});
@@ -61,5 +70,41 @@ export async function interactivePrompt() {
console.log(chalk.green("Answer"));
console.log(response.response);
console.log("");

if (config.LOG_CHAT) {
fs.appendFileSync(
`${LOG_FILE_LOCATION}/${logFileName}.log`,
input.userQuestion + "\n\n",
);

fs.appendFileSync(
`${LOG_FILE_LOCATION}/${logFileName}.log`,
response.response + "\n\n",
);

fs.appendFileSync(
`${LOG_FILE_LOCATION}/${logFileName}.log`,
"-----------------------\n\n",
);
}
}
}

function getCurrentUnixTimestamp(): number {
return Math.floor(Date.now() / 1000);
}

function loadConfig(): ConfigurationOptions {
try {
const configData = fs.readFileSync(CONFIG_FILE_LOCATION, "utf8");
const config = JSON.parse(configData) as ConfigurationOptions;

return config;
} catch (err) {
console.error(
"There was an issue loading your config file %d. \nExiting...",
err,
);
exit(1);
}
}
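
As a rough illustration (not part of the commit), each answered question appends three pieces to the per-session log: the question, the answer, and a separator line. A consolidated sketch of that append step, assuming the same LOG_FILE_LOCATION and file naming as above; appendChatEntry is a hypothetical helper, not code from the repository.

import fs from "fs";
import { LOG_FILE_LOCATION } from "./configure.js";

// Equivalent to the three appendFileSync calls above: one entry per exchange,
// written as question, answer, then a separator.
function appendChatEntry(logFileName: string, question: string, answer: string): void {
  const entry = `${question}\n\n${answer}\n\n-----------------------\n\n`;
  fs.appendFileSync(`${LOG_FILE_LOCATION}/${logFileName}.log`, entry);
}

// Usage inside the chat loop above:
// appendChatEntry(logFileName, input.userQuestion, response.response);
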
2 changes: 1 addition & 1 deletion src/types.ts
@@ -3,8 +3,8 @@ export interface UserChatPrompt {
}

export interface ConfigurationPrompt {
configurationFileLocation?: string;
openAiApiKey?: string;
logChat: string;
}

export interface OpenAIChatCompletionMessage {
