Skip to content

Commit

Permalink
removed fal.ai, added together.ai, free /flux command & upgraded llam…
Browse files Browse the repository at this point in the history
…a model to 70b; maybe I'll do something with the free 405b too
  • Loading branch information
DuckyBlender committed Oct 5, 2024
1 parent 3181c93 commit df281cc
Show file tree
Hide file tree
Showing 7 changed files with 120 additions and 214 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ edition = "2021"
[dependencies]
lambda_http = "0.13"
lambda_runtime = "0.13"
reqwest = { version = "0.12", features = [
reqwest = { version = "0.12.8", features = [
"multipart",
"json",
"native-tls-vendored",
Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,9 @@ sussy_ducky_bot is a serverless telegram bot written in Rust that interacts with

- `/help` & `/start` - Displays help text.
- `/caveman` - Uses Llama3.1 8b model from OpenRouter with a custom system prompt.
- `/llama` - Uses Llama 3.1 8b or 3.2 12b vision model from OpenRouter.
- `/llama` - Uses Llama 3.1 70b or 3.2 12b vision model from OpenRouter.
- `/lobotomy` - Uses Llama 3.2 1b model from OpenRouter (for fun)
- `/flux` - Uses the free flux[schnell] model from together.ai

## Running the Bot

Expand Down
100 changes: 0 additions & 100 deletions src/apis/fal.rs

This file was deleted.

4 changes: 2 additions & 2 deletions src/apis/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
mod fal;
mod openai;
mod together;

pub use fal::*;
pub use openai::*;
pub use together::*;
7 changes: 3 additions & 4 deletions src/apis/openai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,13 +43,13 @@ impl OpenAIClient {
)
} else {
(
"meta-llama/llama-3.1-8b-instruct:free".to_string(),
"meta-llama/llama-3.1-70b-instruct:free".to_string(),
Providers::OpenRouter,
)
}
}
BotCommand::Lobotomy => (
"meta-llama/llama-3.2-1b-instruct".to_string(),
"meta-llama/llama-3.2-1b-instruct:free".to_string(),
Providers::OpenRouter,
),
BotCommand::Caveman => (
Expand All @@ -75,8 +75,7 @@ impl OpenAIClient {
fn get_system_prompt(model: &BotCommand) -> Option<&'static str> {
match model {
BotCommand::Caveman => Some("You are a caveman. Speak like a caveman would. All caps, simple words, grammar mistakes etc."),
BotCommand::Llama => Some("Be concise and precise. Don't be verbose. Answer in the user's language."),
BotCommand::Lobotomy => None,
BotCommand::Llama | BotCommand::Lobotomy => Some("Be concise and precise. Don't be verbose. Answer in the user's language."),
BotCommand::Help | BotCommand::Start | BotCommand::Flux => unreachable!(),
}
}
Expand Down
69 changes: 69 additions & 0 deletions src/apis/together.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::env;

/// Request body for together.ai's `POST /v1/images/generations` endpoint.
#[derive(Serialize, Deserialize, Debug)]
pub struct ImageRequest {
    /// Model identifier (e.g. a FLUX schnell model name).
    pub model: String,
    /// Text prompt describing the image to generate.
    pub prompt: String,
    /// Output image width in pixels.
    pub width: u32,
    /// Output image height in pixels.
    pub height: u32,
    /// Number of diffusion steps to run.
    pub steps: u32,
    /// Number of images to generate.
    pub n: u32,
    /// Response encoding; this client reads `ImageData::b64_json`, so this
    /// is presumably set to "b64_json" by callers — confirm at call sites.
    pub response_format: String,
}

/// Thin HTTP client for the together.ai image-generation API.
pub struct TogetherClient {
    // Underlying reqwest HTTP client, reused across requests.
    client: Client,
}

/// Top-level response from the image-generation endpoint.
///
/// Fields the bot does not need (`id`, `model`, `object`) are left
/// commented out; serde silently ignores unknown JSON fields by default.
#[derive(Deserialize, Debug)]
pub struct ImageResponse {
    // pub id: String,
    // pub model: String,
    // pub object: String,
    /// One entry per generated image.
    pub data: Vec<ImageData>,
}

/// A single generated image plus its timing metadata.
#[derive(Deserialize, Debug)]
pub struct ImageData {
    /// Server-reported timing information for this generation.
    pub timings: Timings,
    // pub index: u32,
    /// The image, base64-encoded — presumably populated when the request's
    /// `response_format` is "b64_json"; confirm against the API docs.
    pub b64_json: String,
}

/// Timing metadata reported by the API for one generation.
#[derive(Deserialize, Debug)]
pub struct Timings {
    /// Inference duration (assumed to be seconds — TODO confirm
    /// against together.ai's API documentation).
    pub inference: f64,
}

impl TogetherClient {
    /// Creates a client backed by a fresh `reqwest::Client`.
    pub fn new() -> Self {
        Self {
            client: Client::new(),
        }
    }

    /// Submits an image-generation request to together.ai and returns the
    /// decoded response (images arrive base64-encoded in `ImageData::b64_json`).
    ///
    /// # Panics
    /// Panics if the `TOGETHER_KEY` environment variable is not set.
    ///
    /// # Errors
    /// Returns a `reqwest::Error` if the request fails to send, the API
    /// responds with a non-success HTTP status, or the body cannot be
    /// deserialized into an `ImageResponse`.
    pub async fn submit_request(
        &self,
        prompt: ImageRequest,
    ) -> Result<ImageResponse, reqwest::Error> {
        let response = self
            .client
            .post("https://api.together.xyz/v1/images/generations")
            .header(
                "Authorization",
                format!(
                    "Bearer {}",
                    // Fail fast with an actionable message instead of a bare
                    // unwrap panic when the key is missing.
                    env::var("TOGETHER_KEY")
                        .expect("TOGETHER_KEY environment variable not set")
                ),
            )
            // `.json()` already sets `Content-Type: application/json`,
            // so no explicit header is needed.
            .json(&prompt)
            .send()
            .await?
            // Surface HTTP error statuses (4xx/5xx) directly rather than
            // failing later with an opaque JSON-decode error on an error body.
            .error_for_status()?;

        response.json::<ImageResponse>().await
    }
}

// Clippy `new_without_default`: a no-argument `new` should be mirrored by
// `Default` so the type works with generic `T: Default` code.
impl Default for TogetherClient {
    fn default() -> Self {
        Self::new()
    }
}
Loading

0 comments on commit df281cc

Please sign in to comment.