feat: Use Langchain🐤🔗 as LLM interface & Multimodal support 🖼️
radityaharya committed Apr 16, 2024
1 parent c3bb4f9 commit 625775e
Showing 14 changed files with 527 additions and 326 deletions.
Binary file modified bun.lockb
6 changes: 6 additions & 0 deletions package.json
@@ -23,8 +23,12 @@
"prisma:studio": "prisma studio"
},
"dependencies": {
"@langchain/community": "^0.0.48",
"@langchain/google-genai": "^0.0.11",
"@langchain/openai": "^0.0.28",
"@prisma/client": "5.12.1",
"@types/body-parser": "^1.19.5",
"@types/mime-types": "^2.1.4",
"axios": "^1.6.8",
"croner": "^8.0.2",
"date-fns": "^3.6.0",
@@ -33,12 +37,14 @@
"form-data": "^4.0.0",
"gpt3-tokenizer": "^1.1.5",
"hono": "^4.2.4",
"langchain": "^0.1.33",
"lodash": "^4.17.21",
"openai": "^4.33.1",
"pino": "^8.20.0",
"prisma": "^5.12.1",
"rate-limiter-flexible": "^5.0.0",
"rss-parser": "^3.13.0",
"sharp": "^0.33.3",
"tiny-glob": "^0.2.9",
"tmp-promise": "^3.0.3"
},
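The three @langchain/* packages and langchain supply the new provider-agnostic LLM layer, while sharp and @types/mime-types support the image handling. As a minimal sketch of how these packages are typically wired up (the rewritten src/lib/llm.ts is not expanded on this page, so the model names and option values below are assumptions):

import { ChatOpenAI } from '@langchain/openai';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';

// Hypothetical factory; the real module may choose models and options differently.
export function createChatModel(provider: 'openai' | 'google') {
  if (provider === 'google') {
    return new ChatGoogleGenerativeAI({
      modelName: 'gemini-pro-vision', // assumed model name
      apiKey: process.env.GOOGLE_GENAI_API_KEY,
    });
  }
  return new ChatOpenAI({
    modelName: 'gpt-4-turbo', // assumed model name
    openAIApiKey: process.env.OPENAI_API_KEY,
  });
}

Both classes expose the same .invoke() interface, which is what lets the rest of the bot swap providers without touching command code.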
2 changes: 1 addition & 1 deletion src/commands/ask.ts
@@ -6,7 +6,7 @@ import {

import { createErrorEmbed } from '@/lib/embeds';
import { buildContext } from '@/lib/helpers';
import { CompletionStatus, createChatCompletion } from '@/lib/openai';
import { CompletionStatus, createChatCompletion } from '@/lib/llm';

export default new Command({
data: new SlashCommandBuilder()
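The only change in ask.ts (and in chat.ts, image.ts, interaction-create.ts and message-create.ts below) is the import path: the OpenAI-specific module is replaced by the new LangChain-backed @/lib/llm. That file is not expanded on this page; judging from what the changed files import, its public surface is roughly the following (a sketch with inferred signatures, not the actual code):

// Inferred surface of src/lib/llm.ts; declarations only, for orientation.
export enum CompletionStatus {
  Ok,
  UnexpectedError, // other variants are assumed
}

export interface CompletionResponse {
  status: CompletionStatus;
  message: string;
}

export declare function createChatCompletion(
  messages: Array<unknown>,
): Promise<CompletionResponse>;

export declare function generateTitle(message: string, answer: string): Promise<string>;

export declare function createImage(prompt: string): Promise<CompletionResponse>;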
2 changes: 1 addition & 1 deletion src/commands/chat.ts
@@ -28,7 +28,7 @@ import {
CompletionStatus,
createChatCompletion,
generateTitle,
} from '@/lib/openai';
} from '@/lib/llm';
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();
2 changes: 1 addition & 1 deletion src/commands/image.ts
@@ -6,7 +6,7 @@ import {
} from 'discord.js';

import { createErrorEmbed } from '@/lib/embeds';
import { CompletionStatus, createImage } from '@/lib/openai';
import { CompletionStatus, createImage } from '@/lib/llm';

export default new Command({
data: new SlashCommandBuilder()
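createImage also moves to @/lib/llm. The openai package stays in the dependency list, so one plausible shape for it is a thin wrapper over the Images API; this is purely a sketch (the model, size and return shape are assumptions, and the real helper presumably returns the same CompletionResponse shape as the chat path):

import OpenAI from 'openai';

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Hypothetical helper returning a bare image URL for brevity.
export async function createImage(prompt: string): Promise<string> {
  const result = await openai.images.generate({
    model: 'dall-e-3',
    prompt,
    n: 1,
    size: '1024x1024',
  });
  return result.data[0].url ?? '';
}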
126 changes: 11 additions & 115 deletions src/commands/sauce.ts
@@ -6,119 +6,13 @@ import {
SlashCommandBuilder,
} from 'discord.js';
import { createErrorEmbed } from '@/lib/embeds';
import {
getAnimeDetails,
getAnimeSauce,
TraceMoeResultItem,
} from '@/lib/tracemoe';
import { tempFile } from '@/utils/tempFile';
import fs from 'fs';
import os from 'os';
import path from 'path';
import FormData from 'form-data';
import axios from 'axios';

type TraceMoeResultItem = {
anilist: number;
filename: string;
episode: number | null;
from: number;
to: number;
similarity: number;
video: string;
image: string;
};

type TraceMoeResult = {
frameCount: number;
error: string;
result: TraceMoeResultItem[];
limit: {
limit: number;
remaining: number;
reset: number;
};
};

async function downloadImage(url: string): Promise<string> {
console.log('🚀 ~ downloadImage ~ url:', url);
let response;
try {
response = await axios.get(url, { responseType: 'arraybuffer' });
} catch (error: any) {
if (axios.isAxiosError(error)) {
throw new Error(
`Failed to fetch the image at url: ${url}. Error: ${error.message}`,
);
}
throw error;
}

const buffer = response.data;

const tempFilePath = path.join(os.tmpdir(), 'tempImage.jpg');
try {
fs.writeFileSync(tempFilePath, buffer);
} catch (error: any) {
throw new Error(
`Failed to write the image to file at path: ${tempFilePath}. Error: ${error.message}`,
);
}

return tempFilePath;
}

async function getAnimeSauce(tempFilePath: string): Promise<TraceMoeResult> {
console.log('🚀 ~ getAnimeSauce ~ tempFilePath:', tempFilePath);
const formData = new FormData();
formData.append('image', fs.createReadStream(tempFilePath));
const traceResponse = await axios.post(
'https://api.trace.moe/search?cutBorders',
formData,
{
headers: formData.getHeaders(),
},
);
if (traceResponse.status !== 200)
throw new Error('Failed to get anime sauce');
return {
...traceResponse.data,
limit: {
limit: Number(traceResponse.headers['x-ratelimit-limit']),
remaining: Number(traceResponse.headers['x-ratelimit-remaining']),
reset: Number(traceResponse.headers['x-ratelimit-reset']),
},
};
}

async function getAnimeDetails(anilistId: number) {
console.log('🚀 ~ getAnimeDetails ~ getAnimeDetails:', anilistId);
const anilistResponse = await axios.post(
'https://graphql.anilist.co',
{
query: `
query ($id: Int) {
Media(id: $id, type: ANIME) {
title {
romaji
english
native
}
siteUrl
episodes
genres
averageScore
}
}
`,
variables: {
id: anilistId,
},
},
{
headers: {
'Content-Type': 'application/json',
},
},
);
if (anilistResponse.status !== 200)
throw new Error('Failed to get anime details');
return anilistResponse.data;
}

export default new Command({
data: new SlashCommandBuilder()
@@ -154,8 +48,10 @@ export default new Command({
await interaction.deferReply({ ephemeral: false });

try {
const tempFilePath = await downloadImage(input.attachment.url);
const traceResult = await getAnimeSauce(tempFilePath);
const file = await tempFile(input.attachment.url);
const traceResult = await getAnimeSauce({
tempFilePath: file.path,
});
await interaction.editReply({
content: 'Searching for anime sauce...',
});
@@ -225,7 +121,7 @@ export default new Command({
await interaction.followUp({
files: [{ attachment: match.video, name: 'video.mp4' }],
});
fs.unlinkSync(tempFilePath);
fs.unlinkSync(file.path);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (error: any) {
console.error(error);
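The inline downloadImage, getAnimeSauce and getAnimeDetails helpers move out of this command into @/lib/tracemoe and @/utils/tempFile (note that getAnimeSauce now takes an options object with tempFilePath). Neither new file is expanded on this page, but from the call sites in this commit (file.path, file.mimeType, file.base64) tempFile plausibly looks like the sketch below; anything beyond those three fields is an assumption:

import axios from 'axios';
import crypto from 'crypto';
import fs from 'fs/promises';
import os from 'os';
import path from 'path';

export type TempFile = {
  path: string;
  mimeType: string;
  base64: string;
};

// Download a URL into a temp file and expose it as a path plus base64 payload.
export async function tempFile(url: string): Promise<TempFile> {
  const response = await axios.get(url, { responseType: 'arraybuffer' });
  const buffer = Buffer.from(response.data);
  const mimeType = String(response.headers['content-type'] ?? 'application/octet-stream');
  const filePath = path.join(os.tmpdir(), crypto.randomUUID());
  await fs.writeFile(filePath, buffer);
  return { path: filePath, mimeType, base64: buffer.toString('base64') };
}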
3 changes: 3 additions & 0 deletions src/config.ts
@@ -36,6 +36,9 @@ const config = {
system_prompt:
process.env.OPENAI_SYSTEM_PROMPT || 'You are a helpful assistant.',
},
google_genai: {
api_key: process.env.GOOGLE_GENAI_API_KEY,
},
};

// eslint-disable-next-line @typescript-eslint/no-explicit-any
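With both an OpenAI and a Google GenAI key available in config, the LLM layer can presumably pick a provider at runtime. A hedged sketch of that selection (the actual rule lives in the unexpanded src/lib/llm.ts and may well differ):

import config from '@/config';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { ChatOpenAI } from '@langchain/openai';

// Illustrative only: prefer Gemini when its key is configured, otherwise OpenAI.
export function selectChatModel() {
  if (config.google_genai.api_key) {
    return new ChatGoogleGenerativeAI({ apiKey: config.google_genai.api_key });
  }
  return new ChatOpenAI(); // reads OPENAI_API_KEY from the environment by default
}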
2 changes: 1 addition & 1 deletion src/events/interaction-create.ts
@@ -17,7 +17,7 @@ import { RateLimiterMemory } from 'rate-limiter-flexible';
import { createActionRow, createRegenerateButton } from '@/lib/buttons';
import { createErrorEmbed } from '@/lib/embeds';
import { buildThreadContext, isApiError } from '@/lib/helpers';
import { CompletionStatus, createChatCompletion } from '@/lib/openai';
import { CompletionStatus, createChatCompletion } from '@/lib/llm';

const rateLimiter = new RateLimiterMemory({ points: 3, duration: 60 });

19 changes: 17 additions & 2 deletions src/events/message-create.ts
@@ -27,9 +27,10 @@ import {
type CompletionResponse,
CompletionStatus,
createChatCompletion,
} from '@/lib/openai';
} from '@/lib/llm';
import { PrismaClient } from '@prisma/client';
import logger from '@/utils/logger';
import { tempFile } from '@/utils/tempFile';

const prisma = new PrismaClient();

@@ -143,10 +144,24 @@ async function handleDirectMessage(

await channel.sendTyping();

if (message.attachments.size > 0) {
const attachment = message.attachments.first();
if (attachment) {
const file = await tempFile(attachment.url);
message.content = `data:${file.mimeType};base64,${file.base64}`;
}
}

const typingInterval = setInterval(() => {
channel.sendTyping();
}, 5000);

const completion = await createChatCompletion(
buildDirectMessageContext(messages, message.content, client.user.id),
);

clearInterval(typingInterval);

if (completion.status !== CompletionStatus.Ok) {
await handleFailedRequest(
channel,
@@ -182,7 +197,7 @@ export default new Event({
if (
message.author.id === client.user.id ||
message.type !== MessageType.Default ||
!message.content ||
(!message.content && !message.attachments.size) ||
!isEmpty(message.embeds) ||
!isEmpty(message.mentions.members)
) {
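Direct-message attachments are now inlined as a base64 data URL in message.content before the call to createChatCompletion, and the guard at the bottom accepts messages that have attachments but no text. How the LLM layer turns the data URL into a multimodal request is not visible in this diff; with LangChain chat models the usual shape is a HumanMessage whose content mixes text and image parts, roughly as in this sketch:

import { HumanMessage } from '@langchain/core/messages';

// Combine a text prompt and an image data URL into one multimodal message.
// Sketch only: the real createChatCompletion may structure this differently.
function toMultimodalMessage(text: string, dataUrl: string): HumanMessage {
  return new HumanMessage({
    content: [
      { type: 'text', text },
      { type: 'image_url', image_url: { url: dataUrl } },
    ],
  });
}

// e.g. await model.invoke([toMultimodalMessage('Describe this image', message.content)]);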
1 change: 1 addition & 0 deletions src/lib/helpers.ts
@@ -11,6 +11,7 @@ import type OpenAI from 'openai';

import config from '@/config';

// TODO: inject multimodal context metadata here
export function buildContext(
messages: Array<any>,
userMessage: string,
(Diffs for the remaining changed files are not expanded in this view.)