Skip to content

Commit

Permalink
default to llama 3.2 (#235)
Browse files Browse the repository at this point in the history
  • Loading branch information
prabirshrestha authored Oct 1, 2024
1 parent 80f61b6 commit 891c56f
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion examples/llm_ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use langchain_rust::{language_models::llm::LLM, llm::ollama::client::Ollama};
#[cfg(feature = "ollama")]
#[tokio::main]
async fn main() {
let ollama = Ollama::default().with_model("llama3");
let ollama = Ollama::default().with_model("llama3.2");

let response = ollama.invoke("Hi").await.unwrap();
println!("{}", response);
Expand Down
8 changes: 4 additions & 4 deletions src/llm/ollama/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ pub struct Ollama {
pub(crate) options: Option<GenerationOptions>,
}

/// [llama3](https://ollama.com/library/llama3) is an 8B-parameter, 4.7GB model.
const DEFAULT_MODEL: &str = "llama3";
/// [llama3.2](https://ollama.com/library/llama3.2) is a 3B-parameter, 2.0GB model.
const DEFAULT_MODEL: &str = "llama3.2";

impl Ollama {
pub fn new<S: Into<String>>(
Expand Down Expand Up @@ -152,15 +152,15 @@ mod tests {
#[tokio::test]
#[ignore]
async fn test_generate() {
let ollama = Ollama::default().with_model("llama3");
let ollama = Ollama::default().with_model("llama3.2");
let response = ollama.invoke("Hey Macarena, ay").await.unwrap();
println!("{}", response);
}

#[tokio::test]
#[ignore]
async fn test_stream() {
let ollama = Ollama::default().with_model("llama3");
let ollama = Ollama::default().with_model("llama3.2");

let message = Message::new_human_message("Why does water boil at 100 degrees?");
let mut stream = ollama.stream(&vec![message]).await.unwrap();
Expand Down
2 changes: 1 addition & 1 deletion src/llm/ollama/openai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ const OLLAMA_API_BASE: &str = "http://localhost:11434/v1";
/// ## Example
///
/// ```rs
/// let ollama = OpenAI::new(OllamaConfig::default()).with_model("llama3");
/// let ollama = OpenAI::new(OllamaConfig::default()).with_model("llama3.2");
/// let response = ollama.invoke("Say hello!").await.unwrap();
/// ```
#[derive(Clone, Debug, Deserialize)]
Expand Down

0 comments on commit 891c56f

Please sign in to comment.