Commit: markdown support

DuckyBlender committed Oct 17, 2024
1 parent 8c682a0 commit 3a17d02
Showing 3 changed files with 122 additions and 51 deletions.
22 changes: 14 additions & 8 deletions src/apis/openai.rs
@@ -76,12 +76,18 @@ impl OpenAIClient {
}
}

fn get_system_prompt(model: &BotCommand) -> Option<&'static str> {
match model {
BotCommand::Caveman => Some("You are a caveman. Speak like a caveman would. All caps, simple words, grammar mistakes etc. Your name is Grog."),
BotCommand::Llama | BotCommand::Lobotomy | BotCommand::Llama405 => Some("Be concise and precise. Don't be verbose. Answer in the user's language."),
fn get_system_prompt(model: &BotCommand) -> String {
let markdown_explanation =
"You can use markdown to format your text: *bold*, _italic_, __underline__, `code`. Use THIS FORMAT SPECIFICALLY, not any other markdown format.";

let system_prompt = match model {
BotCommand::Caveman => "You are a caveman. Speak like a caveman would. All caps, simple words, grammar mistakes etc. Your name is Grog.",
BotCommand::Llama | BotCommand::Lobotomy | BotCommand::Llama405 => "Be concise and precise. Don't be verbose. Answer in the user's language.",
BotCommand::Help | BotCommand::Start | BotCommand::Flux => unreachable!(),
}
};

let system_prompt = format!("{system_prompt} {markdown_explanation}");
system_prompt
}

fn get_temperature(model: &BotCommand) -> f64 {
@@ -124,11 +130,11 @@ impl OpenAIClient {

let mut messages = vec![];

// Add system message if provided
if let Some(system) = system_prompt {
// Add system prompt
if !system_prompt.is_empty() {
messages.push(json!({
"role": "system",
"content": system
"content": system_prompt
}));
}

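For reference, a minimal sketch (not part of the commit) of what the reworked get_system_prompt now returns: the model-specific prompt followed by the Telegram-markdown formatting hint. It assumes a #[cfg(test)] module at the bottom of src/apis/openai.rs, where the private associated function is reachable.

    // Hypothetical test, not part of this commit.
    #[test]
    fn system_prompt_includes_markdown_hint() {
        let prompt = OpenAIClient::get_system_prompt(&crate::BotCommand::Caveman);
        assert!(prompt.starts_with("You are a caveman."));
        assert!(prompt.contains("You can use markdown to format your text"));
    }
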
57 changes: 16 additions & 41 deletions src/main.rs
@@ -9,7 +9,6 @@ use base64::engine::Engine as _;
use lambda_http::{run, service_fn, Error};

use std::env;
use teloxide::payloads::SendMessageSetters;
use teloxide::prelude::*;
use teloxide::types::{ChatAction, InputFile, Message, ReplyParameters, UpdateKind};
use teloxide::utils::command::BotCommands;
@@ -19,7 +18,9 @@ use tracing_subscriber::EnvFilter;
mod apis;

mod utils;
use utils::{download_and_encode_image, find_prompt, get_image_from_message, parse_webhook};
use utils::{
download_and_encode_image, find_prompt, get_image_from_message, parse_webhook, safe_send,
};

#[derive(BotCommands, Clone, Debug, PartialEq)]
#[command(rename_rule = "lowercase", description = "Models from OpenRouter")]
@@ -110,7 +111,7 @@ async fn handler(
|| message.chat.id == ChatId(-1001641972650))
{
let random: f64 = rand::random();
debug!("Random number: {}", random);
// debug!("Random number: {}", random);
if random < 0.001 {
// 0.1% chance of triggering
// This has a bug: if the message starts with a command, the bot will respond with an error
@@ -136,9 +137,8 @@ async fn handle_command(

match command {
BotCommand::Help | BotCommand::Start => {
bot.send_message(message.chat.id, BotCommand::descriptions().to_string())
.await
.unwrap();
let help_text = BotCommand::descriptions().to_string();
safe_send(bot, message.chat.id, message.id, &help_text).await;
Ok(lambda_http::Response::builder()
.status(200)
.body(String::new())
@@ -149,13 +149,7 @@
// Just the prompt, no image
let Some(msg_text) = find_prompt(message).await else {
warn!("No prompt found in the message or reply message");
bot.send_message(
message.chat.id,
"Please provide a prompt. It can be in the message or a reply to a message.",
)
.reply_parameters(ReplyParameters::new(message.id))
.await
.unwrap();
safe_send(bot, message.chat.id, message.id, "Please provide a prompt.").await;

return Ok(lambda_http::Response::builder()
.status(200)
@@ -183,10 +177,7 @@ async fn handle_command(
let res = client.submit_request(request).await;
if let Err(e) = res {
error!("Failed to submit request: {:?}", e);
bot.send_message(message.chat.id, format!("error: {e:?}"))
.reply_parameters(ReplyParameters::new(message.id))
.await
.unwrap();
safe_send(bot, message.chat.id, message.id, &format!("error: {e:?}")).await;
return Ok(lambda_http::Response::builder()
.status(200)
.body(String::new())
@@ -238,13 +229,8 @@ async fn handle_command(
String::new()
} else {
warn!("No prompt found in the message or reply message");
bot.send_message(
message.chat.id,
"Please provide a prompt. It can be in the message or a reply to a message.",
)
.reply_parameters(ReplyParameters::new(message.id))
.await
.unwrap();
safe_send(bot, message.chat.id, message.id, "Please provide a prompt.")
.await;

return Ok(lambda_http::Response::builder()
.status(200)
@@ -272,10 +258,8 @@

// Catch error
if let Err(e) = res {
bot.send_message(message.chat.id, format!("error: {e:?}"))
.reply_parameters(ReplyParameters::new(message.id))
.await
.unwrap();
error!("Failed to submit request: {:?}", e);
safe_send(bot, message.chat.id, message.id, &format!("error: {e:?}")).await;

return Ok(lambda_http::Response::builder()
.status(200)
@@ -288,25 +272,16 @@
// Check if empty response
if response_text.is_empty() {
warn!("Empty response from API");
bot.send_message(message.chat.id, "<no text>")
.reply_parameters(ReplyParameters::new(message.id))
.await
.unwrap();
safe_send(bot, message.chat.id, message.id, "<no text>").await;

return Ok(lambda_http::Response::builder()
.status(200)
.body(String::new())
.unwrap());
}

// Send the response
let res = bot
.send_message(message.chat.id, &response_text)
.reply_parameters(ReplyParameters::new(message.id))
.await;

if let Err(e) = res {
error!("Failed to send message: {:?}", e);
}
// Safe send the response
safe_send(bot, message.chat.id, message.id, &response_text).await;

Ok(lambda_http::Response::builder()
.status(200)
94 changes: 92 additions & 2 deletions src/utils.rs
@@ -2,9 +2,9 @@ use base64::{engine::general_purpose, Engine as _};
use teloxide::{
net::Download,
prelude::*,
types::{PhotoSize, Sticker},
types::{MessageId, ParseMode, PhotoSize, ReplyParameters, Sticker},
};
use tracing::{debug, error, warn};
use tracing::{debug, error, info, warn};

pub enum Media {
Photo(PhotoSize),
@@ -114,6 +114,96 @@ pub fn parse_webhook(
Ok(body_json)
}

pub fn split_string(input: &str, max_length: usize) -> Vec<String> {
let mut result = Vec::new();
let mut current_chunk = String::new();
let mut current_length = 0;

for word in input.split_whitespace() {
if current_length + word.len() + 1 > max_length && !current_chunk.is_empty() {
result.push(current_chunk);
current_chunk = String::new();
current_length = 0;
}

if current_length > 0 {
current_chunk.push(' ');
current_length += 1;
}

current_chunk.push_str(word);
current_length += word.len();
}

if !current_chunk.is_empty() {
result.push(current_chunk);
}

result
}
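
A quick sanity check of the chunking behaviour (hypothetical test, not part of the commit; it would slot into the existing tests module at the bottom of this file): words stay whole and every chunk fits within the limit.

    // Hypothetical test, not part of this commit.
    #[test]
    fn split_string_keeps_words_whole() {
        let chunks = split_string("alpha beta gamma delta", 11);
        assert_eq!(chunks, vec!["alpha beta", "gamma delta"]);
        assert!(chunks.iter().all(|chunk| chunk.len() <= 11));
    }
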

pub fn escape_markdown(text: &str) -> String {
let mut escaped_text = String::new();
for c in text.chars() {
match c {
'[' | ']' | '(' | ')' | '~' | '>' | '#' | '+' | '-' | '=' | '|' | '{' | '}' | '.'
| '!' => {
escaped_text.push('\\');
escaped_text.push(c);
}
_ => escaped_text.push(c),
}
}

escaped_text
}
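
For illustration, a hedged example of the escaping rules (hypothetical test, not part of the commit): punctuation that Telegram's MarkdownV2 treats as special gets a backslash, while the formatting characters the system prompt tells the model to use (*, _, `) pass through untouched.

    // Hypothetical test, not part of this commit.
    #[test]
    fn escape_markdown_keeps_formatting_chars() {
        assert_eq!(escape_markdown("v1.2-beta *bold*"), "v1\\.2\\-beta *bold*");
        assert_eq!(escape_markdown("`code` stays"), "`code` stays");
    }
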

// Sends the text as MarkdownV2 if it's at most 4096 characters. If that fails or the text is longer, it falls back to plain text, splitting the message into chunks of up to 4096 characters and sending them separately.
pub async fn safe_send(bot: Bot, chat_id: ChatId, reply_to_msg_id: MessageId, text: &str) {
// Try sending the message as MarkdownV2 if it's at most 4096 characters
if text.len() <= 4096 {
let escaped_text = escape_markdown(text);
let result = bot
.send_message(chat_id, escaped_text)
.reply_parameters(ReplyParameters::new(reply_to_msg_id))
.parse_mode(ParseMode::MarkdownV2)
.send()
.await;

// If sending as Markdown succeeds, return
match result {
Ok(_) => return,
Err(err) => {
warn!(
"Failed to send as Markdown: {:?}, trying as plain text...",
err
);
}
}
}

// If sending as Markdown failed or the text is too long, fall back to plain text, splitting the string into chunks first.
let split_text = split_string(text, 4096);
if split_text.len() > 1 {
info!(
"Splitting the message into {} part(s) since it's too long",
split_text.len()
);
}

for text in split_text {
let res = bot
.send_message(chat_id, text)
.reply_parameters(ReplyParameters::new(reply_to_msg_id))
.send()
.await;

if let Err(err) = res {
error!("Failed to send message: {:?}", err);
}
}
}

#[cfg(test)]
mod tests {
use super::*;
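Taken together, the new helper gives call sites in src/main.rs a single line for replies. A rough usage sketch (assuming bot, message, and response_text are in scope, as in handle_command above):

    // Hypothetical call site; mirrors the pattern now used throughout src/main.rs.
    // safe_send escapes for MarkdownV2, falls back to plain text if Telegram rejects
    // the formatting, and splits anything longer than 4096 characters.
    safe_send(bot.clone(), message.chat.id, message.id, &response_text).await;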
