Fix issue where query wasn't using the config file - we were still trying to use environment variables

SpikePuppet committed Jul 6, 2023
1 parent 314ab46 commit 4bec777
Showing 5 changed files with 33 additions and 32 deletions.
20 changes: 18 additions & 2 deletions src/configure.ts
@@ -2,13 +2,14 @@ import chalk from "chalk";
 import enquirer from "enquirer";
 import * as fs from "fs";
 import * as os from "os";
-import { ConfigurationPrompt } from "./types";
+import { ConfigurationOptions, ConfigurationPrompt } from "./types";
+import { exit } from "process";
 
 export const CONFIG_FOLDER = `${os.homedir()}/.ducky`;
 export const CONFIG_FILE_LOCATION: string = `${CONFIG_FOLDER}/config.json`;
 export const LOG_FILE_LOCATION: string = `${CONFIG_FOLDER}/chat`;
 
-export async function configure() {
+export async function configureDucky() {
   const { prompt } = enquirer;
   console.log(chalk.green("Let's configure Ducky!"));
   console.log(
@@ -52,3 +53,18 @@ export async function configure()
     console.error(err);
   }
 }
+
+export function loadDuckyConfig(): ConfigurationOptions {
+  try {
+    const configData = fs.readFileSync(CONFIG_FILE_LOCATION, "utf8");
+    const config = JSON.parse(configData) as ConfigurationOptions;
+
+    return config;
+  } catch (err) {
+    console.error(
+      "There was an issue loading your config file %d. \nExiting...",
+      err,
+    );
+    exit(1);
+  }
+}
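
For reference, a minimal usage sketch of the new loadDuckyConfig export, mirroring how interactivePrompt.ts and query.ts call it elsewhere in this commit; the sample config.json contents are illustrative assumptions, not taken from the repository:

import { loadDuckyConfig } from "./configure.js";

// loadDuckyConfig reads CONFIG_FILE_LOCATION (~/.ducky/config.json) synchronously
// and exits the process if the file is missing or cannot be parsed.
// A matching config file would look something like:
//   { "OPEN_AI_API_KEY": "sk-...", "LOG_CHAT": true }
const config = loadDuckyConfig();
console.log(`Chat logging enabled: ${config.LOG_CHAT}`);
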
4 changes: 2 additions & 2 deletions src/index.ts
@@ -3,7 +3,7 @@ import "dotenv/config";
 import { Command } from "commander";
 import { interactivePrompt } from "./interactivePrompt.js";
 import { query } from "./query.js";
-import { configure } from "./configure.js";
+import { configureDucky } from "./configure.js";
 
 const program = new Command();
 program
@@ -15,7 +15,7 @@ program
   .command("configure")
   .description("Configure Ducky!")
   .action(async () => {
-    await configure().catch(console.error);
+    await configureDucky().catch(console.error);
   });
 
 program
31 changes: 5 additions & 26 deletions src/interactivePrompt.ts
@@ -1,27 +1,21 @@
 import chalk from "chalk";
 import enquirer from "enquirer";
+import fs from "fs";
 import { ConversationChain } from "langchain/chains";
 import { ChatOpenAI } from "langchain/chat_models/openai";
-import { BufferMemory } from "langchain/memory";
-import {
-  ChatPromptTemplate,
-  HumanMessagePromptTemplate,
-  MessagesPlaceholder,
-  SystemMessagePromptTemplate,
-} from "langchain/prompts";
+import { SystemMessagePromptTemplate } from "langchain/prompts";
+import { BufferMemory } from "langchain/memory";
+import { LOG_FILE_LOCATION, loadDuckyConfig } from "./configure.js";
 import { UserChatPrompt } from "./types";
-import fs from "fs";
-import { CONFIG_FILE_LOCATION, LOG_FILE_LOCATION } from "./configure.js";
-import { exit } from "process";
 
 interface ConfigurationOptions {
   OPEN_AI_API_KEY: string;
   LOG_CHAT: boolean;
 }
 
 export async function interactivePrompt(): Promise<void> {
   const logFileName = `ducky-${getCurrentUnixTimestamp()}.log`;
-  const config = await loadConfig();
+  const config = await loadDuckyConfig();
   const model = new ChatOpenAI({
     openAIApiKey: config.OPEN_AI_API_KEY,
     modelName: "gpt-3.5-turbo-0613",
@@ -93,18 +87,3 @@ export async function interactivePrompt(): Promise<void> {
 function getCurrentUnixTimestamp(): number {
   return Math.floor(Date.now() / 1000);
 }
-
-function loadConfig(): ConfigurationOptions {
-  try {
-    const configData = fs.readFileSync(CONFIG_FILE_LOCATION, "utf8");
-    const config = JSON.parse(configData) as ConfigurationOptions;
-
-    return config;
-  } catch (err) {
-    console.error(
-      "There was an issue loading your config file %d. \nExiting...",
-      err,
-    );
-    exit(1);
-  }
-}
5 changes: 3 additions & 2 deletions src/query.ts
@@ -5,11 +5,12 @@ import {
   SystemChatMessage,
 } from "langchain/schema";
 import clipboard from "clipboardy";
+import { loadDuckyConfig } from "./configure.js";
 
 export async function query(prompt: string) {
-  const openAIApiKey = process.env.OPENAI_API_KEY;
+  const config = await loadDuckyConfig();
   const model = new ChatOpenAI({
-    openAIApiKey,
+    openAIApiKey: config.OPEN_AI_API_KEY,
     modelName: "gpt-3.5-turbo-0613",
     temperature: 0.1,
   });
5 changes: 5 additions & 0 deletions src/types.ts
@@ -11,3 +11,8 @@ export interface OpenAIChatCompletionMessage {
   role: "user" | "system" | "assistant";
   content: string;
 }
+
+export interface ConfigurationOptions {
+  OPEN_AI_API_KEY: string;
+  LOG_CHAT: boolean;
+}
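
Condensing the hunks above, the fix described in the commit message comes down to src/query.ts resolving the API key through the shared config loader instead of the environment. A rough sketch of the resulting function head (the surrounding query logic - message construction and clipboard handling - is unchanged and omitted here):

import { ChatOpenAI } from "langchain/chat_models/openai";
import { loadDuckyConfig } from "./configure.js";

export async function query(prompt: string) {
  // Previously: const openAIApiKey = process.env.OPENAI_API_KEY;
  const config = await loadDuckyConfig();
  const model = new ChatOpenAI({
    openAIApiKey: config.OPEN_AI_API_KEY, // now read from ~/.ducky/config.json
    modelName: "gpt-3.5-turbo-0613",
    temperature: 0.1,
  });
  // ... the rest of query() continues as before.
}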
