feat(model): add support for EternalAI onchain toolset #205

Merged: 20 commits, Jan 24, 2025

Commits
431b69e
feat: onchain system prompt
eternal-ai-org Jan 15, 2025
6a8dd62
feat: onchain system prompt
eternal-ai-org Jan 16, 2025
6d20b69
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 16, 2025
5a70ec0
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 16, 2025
83c7a02
Merge pull request #1 from eternalai-org/eternalai-agent
eternal-ai-org Jan 16, 2025
2bf4758
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
eb88bf6
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
b47aa69
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
b49c03c
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
a6a67c3
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
5ae0b22
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
dfdbc23
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 17, 2025
d70b3e1
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 21, 2025
02a03ac
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 21, 2025
ba108bb
Merge branch '0xPlaygrounds:main' into main
eternal-ai-org Jan 21, 2025
8d1c01b
Merge remote-tracking branch 'origin/main' into eternalai-agent
eternal-ai-org Jan 21, 2025
6410b4c
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 21, 2025
f4c61be
Merge branch '0xPlaygrounds:main' into main
eternal-ai-org Jan 21, 2025
4dff9ef
Merge remote-tracking branch 'origin/main' into eternalai-agent
eternal-ai-org Jan 22, 2025
83e4440
feat: eternal-ai on-chain system prompt
eternal-ai-org Jan 23, 2025
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -4,5 +4,5 @@ members = [
    "rig-core", "rig-lancedb",
    "rig-mongodb", "rig-neo4j",
    "rig-qdrant", "rig-core/rig-core-derive",
    "rig-sqlite"
    "rig-sqlite", "rig-eternalai"
]
2 changes: 1 addition & 1 deletion rig-core/src/completion.rs
@@ -266,7 +266,7 @@ pub struct CompletionRequest {
}

impl CompletionRequest {
    pub(crate) fn prompt_with_context(&self) -> String {
    pub fn prompt_with_context(&self) -> String {
        if !self.documents.is_empty() {
            format!(
                "<attachments>\n{}</attachments>\n\n{}",
1 change: 0 additions & 1 deletion rig-core/src/providers/mod.rs
@@ -46,7 +46,6 @@
pub mod anthropic;
pub mod cohere;
pub mod deepseek;
pub mod eternalai;
pub mod gemini;
pub mod openai;
pub mod perplexity;
18 changes: 18 additions & 0 deletions rig-eternalai/Cargo.toml
@@ -0,0 +1,18 @@
[package]
name = "rig-eternalai"
version = "0.1.0"
edition = "2021"

[dependencies]
rig-core = { path = "../rig-core", version = "0.6.1" }
ethers = "2.0.14"
reqwest = { version = "0.11.22", features = ["json"] }
serde = { version = "1.0.193", features = ["derive"] }
serde_json = "1.0.108"
tracing = "0.1.40"
schemars = "0.8.16"

[dev-dependencies]
anyhow = "1.0.75"
tokio = { version = "1.34.0", features = ["full"] }
tracing-subscriber = "0.3.18"
@@ -1,9 +1,14 @@
use rig::agent::AgentBuilder;
use rig::providers::eternalai::{CompletionModel, NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8};
use rig::{completion::Prompt, providers};
use rig::completion::Prompt;
use rig_eternalai::providers::eternalai::{
    Client, CompletionModel, NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8,
};

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::DEBUG)
        .init();
    println!("Running basic agent with eternalai");
    basic_eternalai().await?;

@@ -14,8 +19,8 @@ async fn main() -> Result<(), anyhow::Error> {
    Ok(())
}

fn client() -> providers::eternalai::Client {
    providers::eternalai::Client::from_env()
fn client() -> Client {
    Client::from_env()
}

fn partial_agent_eternalai() -> AgentBuilder<CompletionModel> {
@@ -41,7 +46,7 @@ async fn basic_eternalai() -> Result<(), anyhow::Error> {

async fn context_eternalai() -> Result<(), anyhow::Error> {
    let model = client().completion_model(
        providers::eternalai::NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8,
        NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8,
        Option::from("45762"),
        // None,
    );
72 changes: 72 additions & 0 deletions rig-eternalai/src/eternalai_system_prompt_manager_toolset.rs
@@ -0,0 +1,72 @@
use ethers::prelude::*;
use reqwest::get;
use std::ffi::c_uint;
use std::sync::Arc;

const IPFS: &str = "ipfs://";
const LIGHTHOUSE_IPFS: &str = "https://gateway.lighthouse.storage/ipfs/";
const GCS_ETERNAL_AI_BASE_URL: &str = "https://cdn.eternalai.org/upload/";

pub async fn fetch_system_prompt_raw_or_ipfs(content: &str) -> Option<String> {
    if content.contains(IPFS) {
        let light_house = content.replace(IPFS, LIGHTHOUSE_IPFS);
        tracing::debug!("light_house : {}", light_house);
        let mut response = get(light_house).await.unwrap();
        if response.status().is_success() {
            let body = response.text().await.unwrap();
            tracing::debug!("light_house body: {}", body);
            return Some(body);
        } else {
            let gcs = content.replace(IPFS, GCS_ETERNAL_AI_BASE_URL);
            tracing::debug!("gcs: {}", gcs);
            response = get(gcs).await.unwrap();
            if response.status().is_success() {
                let body = response.text().await.unwrap();
                tracing::debug!("gcs body: {}", body);
                return Some(body);
            } else {
                return None;
            }
        }
    }
    Some(content.to_string())
}

pub async fn get_on_chain_system_prompt(
    rpc_url: &str,
    contract_addr: &str,
    agent_id: c_uint,
) -> Result<Option<String>, String> {
    abigen!(
        SystemPromptManagementContract,
        r#"
        [{"inputs": [{"internalType": "uint256", "name": "_agentId", "type": "uint256"}], "name": "getAgentSystemPrompt", "outputs": [{"internalType": "bytes[]", "name": "","type": "bytes[]"}], "stateMutability": "view", "type": "function"}]
        "#
    );
    let provider =
        Provider::<Http>::try_from(rpc_url).map_err(|e| format!("Failed to parse url: {}", e))?;
    let client = Arc::new(provider);
    let contract_address: Address = contract_addr
        .parse()
        .map_err(|e| format!("invalid contract address: {}", e))?;
    let contract = SystemPromptManagementContract::new(contract_address, client);
    let system_prompts: Vec<Bytes> = contract
        .get_agent_system_prompt(U256::from(agent_id))
        .call()
        .await
        .map_err(|e| format!("invalid agent system prompt: {}", e))?;

    let decoded_strings: Vec<String> = system_prompts
        .iter()
        .map(|bytes| {
            String::from_utf8(bytes.to_vec()).unwrap_or_else(|_| "[Invalid UTF-8]".to_string())
        })
        .collect();

    if !decoded_strings.is_empty() {
        let prompt = decoded_strings[0].clone();
        tracing::debug!("system prompt : {}", prompt);
        return Ok(fetch_system_prompt_raw_or_ipfs(&prompt).await);
    }
    Ok(None)
}
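For reviewers, here is a minimal sketch of how the new helper is meant to be called. The RPC endpoint, contract address, and agent ID below are placeholders, not values from this PR:

```rust
use rig_eternalai::eternalai_system_prompt_manager_toolset::get_on_chain_system_prompt;

#[tokio::main]
async fn main() -> Result<(), String> {
    // Placeholder values; substitute a real JSON-RPC endpoint, contract address, and agent ID.
    let rpc_url = "https://example-rpc.invalid";
    let contract_addr = "0x0000000000000000000000000000000000000000";
    let agent_id = 45762; // inferred as c_uint from the function signature

    // Reads the agent's system prompt from the contract and resolves any ipfs:// pointer.
    match get_on_chain_system_prompt(rpc_url, contract_addr, agent_id).await? {
        Some(prompt) => println!("system prompt: {}", prompt),
        None => println!("no system prompt registered for this agent"),
    }
    Ok(())
}
```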
19 changes: 19 additions & 0 deletions rig-eternalai/src/json_utils.rs
@@ -0,0 +1,19 @@
pub fn merge(a: serde_json::Value, b: serde_json::Value) -> serde_json::Value {
match (a, b) {
(serde_json::Value::Object(mut a_map), serde_json::Value::Object(b_map)) => {
b_map.into_iter().for_each(|(key, value)| {
a_map.insert(key, value);
});
serde_json::Value::Object(a_map)
}
(a, _) => a,
}
}

pub fn merge_inplace(a: &mut serde_json::Value, b: serde_json::Value) {
if let (serde_json::Value::Object(a_map), serde_json::Value::Object(b_map)) = (a, b) {
b_map.into_iter().for_each(|(key, value)| {
a_map.insert(key, value);
});
}
}
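For context, a small illustrative sketch of how these helpers behave (the JSON values are made up):

```rust
use rig_eternalai::json_utils::{merge, merge_inplace};
use serde_json::json;

fn main() {
    // `merge` returns a new value; keys from the second object overwrite keys from the first.
    let merged = merge(
        json!({"model": "example-model", "temperature": 0.7}),
        json!({"temperature": 0.2}),
    );
    assert_eq!(merged, json!({"model": "example-model", "temperature": 0.2}));

    // `merge_inplace` mutates the first value, and only when both values are JSON objects.
    let mut base = json!({"stream": false});
    merge_inplace(&mut base, json!({"chain_id": "45762"}));
    assert_eq!(base, json!({"stream": false, "chain_id": "45762"}));
}
```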
5 changes: 5 additions & 0 deletions rig-eternalai/src/lib.rs
@@ -0,0 +1,5 @@
pub mod eternalai_system_prompt_manager_toolset;
pub mod providers;

pub mod json_utils;
// pub mod completion;
@@ -2,30 +2,30 @@
//!
//! # Example
//! ```
//! use rig::providers::eternalai;
//! use rig_eternalai::providers::eternalai;
//!
//! let client = eternalai::Client::new("YOUR_API_KEY");
//!
//! let gpt4o = client.completion_model(eternalai::NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8);
//! ```

use crate::{
    agent::AgentBuilder,
    completion::{self, CompletionError, CompletionRequest},
    embeddings::{self, EmbeddingError, EmbeddingsBuilder},
    extractor::ExtractorBuilder,
    json_utils, Embed,
};
use crate::eternalai_system_prompt_manager_toolset;
use crate::json_utils;
use rig::agent::AgentBuilder;
use rig::completion::{CompletionError, CompletionRequest};
use rig::embeddings::{EmbeddingError, EmbeddingsBuilder};
use rig::extractor::ExtractorBuilder;
use rig::{completion, embeddings, Embed};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::ffi::c_uint;
use std::time::Duration;

// ================================================================
// Main EternalAI Client
// ================================================================
const ETERNALAI_API_BASE_URL: &str = "https://api.eternalai.org/v1";

#[derive(Clone)]
pub struct Client {
base_url: String,
@@ -77,7 +77,7 @@ impl Client {
///
/// # Example
/// ```
/// use rig::providers::eternalai::{Client, self};
/// use rig_eternalai::providers::eternalai::{Client, self};
///
/// // Initialize the EternalAI client
/// let eternalai = Client::new("your-open-ai-api-key");
@@ -97,7 +97,7 @@ impl Client {
///
/// # Example
/// ```
/// use rig::providers::eternalai::{Client, self};
/// use rig_eternalai::providers::eternalai::{Client, self};
///
/// // Initialize the EternalAI client
/// let eternalai = Client::new("your-open-ai-api-key");
@@ -112,7 +112,7 @@ impl Client {
///
/// # Example
/// ```
/// use rig::providers::eternalai::{Client, self};
/// use rig_eternalai::providers::eternalai::{Client, self};
///
/// // Initialize the EternalAI client
/// let eternalai = Client::new("your-open-ai-api-key");
@@ -132,7 +132,7 @@ impl Client {
///
/// # Example
/// ```
/// use rig::providers::eternalai::{Client, self};
/// use rig_eternalai::providers::eternalai::{Client, self};
///
/// // Initialize the EternalAI client
/// let eternalai = Client::new("your-open-ai-api-key");
@@ -147,7 +147,7 @@ impl Client {
///
/// # Example
/// ```
/// use rig::providers::eternalai::{Client, self};
/// use rig_eternalai::providers::eternalai::{Client, self};
///
/// // Initialize the Eternal client
/// let eternalai = Client::new("your-open-ai-api-key");
@@ -471,6 +471,47 @@ impl completion::CompletionModel for CompletionModel {
            vec![]
        };

        tracing::info!("Try to get on-chain system prompt");
        let eternal_ai_rpc = std::env::var("ETERNALAI_RPC_URL").unwrap_or_else(|_| "".to_string());
        let eternal_ai_contract =
            std::env::var("ETERNALAI_AGENT_CONTRACT_ADDRESS").unwrap_or_else(|_| "".to_string());
        let eternal_ai_agent_id =
            std::env::var("ETERNALAI_AGENT_ID").unwrap_or_else(|_| "".to_string());
        if !eternal_ai_rpc.is_empty()
            && !eternal_ai_contract.is_empty()
            && !eternal_ai_agent_id.is_empty()
        {
            tracing::info!(
                "get on-chain system prompt with {}, {}, {}",
                eternal_ai_rpc,
                eternal_ai_contract,
                eternal_ai_agent_id
            );
            let c_value: c_uint = eternal_ai_agent_id.parse::<u32>().unwrap_or(0);
            let prompt = match eternalai_system_prompt_manager_toolset::get_on_chain_system_prompt(
                &eternal_ai_rpc,
                &eternal_ai_contract,
                c_value,
            )
            .await
            {
                Ok(value) => value,
                Err(e) => return Err(CompletionError::ProviderError(e)),
            };
            match prompt {
                None => {
                    tracing::info!("on-chain system prompt is none")
                }
                Some(value) => {
                    let temp = completion::Message {
                        role: "system".into(),
                        content: value,
                    };
                    full_history.push(temp);
                }
            }
        }

        // Extend existing chat history
        full_history.append(&mut completion_request.chat_history);

@@ -506,6 +547,8 @@ impl completion::CompletionModel for CompletionModel {
            })
        };

        tracing::debug!("request: {:?}", request.to_string());

        let response = self
            .client
            .post("/chat/completions")
@@ -529,10 +572,10 @@ impl completion::CompletionModel for CompletionModel {
                    match &response.onchain_data {
                        Some(data) => {
                            let onchain_data = serde_json::to_string_pretty(data)?;
                            println!("onchain_data: {}", onchain_data);
                            tracing::info!("onchain_data: {}", onchain_data);
                        }
                        None => {
                            println!("onchain_data: None");
                            tracing::info!("onchain_data: None");
                        }
                    }
                    response.try_into()
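The on-chain prompt lookup added above only runs when `ETERNALAI_RPC_URL`, `ETERNALAI_AGENT_CONTRACT_ADDRESS`, and `ETERNALAI_AGENT_ID` are all set. A hedged sketch of wiring it up end to end, mirroring the example file in this PR (the endpoint, contract address, and IDs are placeholders):

```rust
use rig::agent::AgentBuilder;
use rig::completion::Prompt;
use rig_eternalai::providers::eternalai::{Client, NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8};

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // Placeholder values; the completion path skips the contract call if any of these is unset.
    std::env::set_var("ETERNALAI_RPC_URL", "https://example-rpc.invalid");
    std::env::set_var(
        "ETERNALAI_AGENT_CONTRACT_ADDRESS",
        "0x0000000000000000000000000000000000000000",
    );
    std::env::set_var("ETERNALAI_AGENT_ID", "1");

    // Client::from_env() also expects the EternalAI API key in the environment.
    let model = Client::from_env()
        .completion_model(NOUS_RESEARCH_HERMES_3_LLAMA_3_1_70B_FP8, Option::from("45762"));
    let agent = AgentBuilder::new(model).build();

    let answer = agent.prompt("Hello, world!").await?;
    println!("{}", answer);
    Ok(())
}
```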
1 change: 1 addition & 0 deletions rig-eternalai/src/providers/mod.rs
@@ -0,0 +1 @@
pub mod eternalai;