default to llama 3.2 for ollama #235 (Merged)

merged 1 commit on Oct 1, 2024
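This PR switches the default Ollama model from llama3 (8B parameters, 4.7GB) to llama3.2 (3B parameters, 2.0GB) in three places: the llm_ollama example, the DEFAULT_MODEL constant in the Ollama client, and the doc-comment example for the OpenAI-compatible wrapper.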
examples/llm_ollama.rs (2 changes: 1 addition & 1 deletion)

@@ -4,7 +4,7 @@ use langchain_rust::{language_models::llm::LLM, llm::ollama::client::Ollama};
 #[cfg(feature = "ollama")]
 #[tokio::main]
 async fn main() {
-    let ollama = Ollama::default().with_model("llama3");
+    let ollama = Ollama::default().with_model("llama3.2");
 
     let response = ollama.invoke("Hi").await.unwrap();
     println!("{}", response);
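For anyone trying the updated example, `invoke` returns a `Result`, so a variant that reports failures (Ollama server not running, model not yet pulled via `ollama pull llama3.2`) instead of unwrapping can be handy. A minimal sketch using only the APIs visible in this diff:

```rust
use langchain_rust::{language_models::llm::LLM, llm::ollama::client::Ollama};

#[cfg(feature = "ollama")]
#[tokio::main]
async fn main() {
    // No with_model call: after this PR, Ollama::default() selects "llama3.2".
    let ollama = Ollama::default();

    // invoke(...) errors if the server is unreachable or the model is missing,
    // so surface that instead of panicking.
    match ollama.invoke("Hi").await {
        Ok(response) => println!("{}", response),
        Err(e) => eprintln!("ollama call failed: {:?}", e),
    }
}
```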
src/llm/ollama/client.rs (8 changes: 4 additions & 4 deletions)

@@ -24,8 +24,8 @@ pub struct Ollama {
     pub(crate) options: Option<GenerationOptions>,
 }
 
-/// [llama3](https://ollama.com/library/llama3) is a 8B parameters, 4.7GB model.
-const DEFAULT_MODEL: &str = "llama3";
+/// [llama3.2](https://ollama.com/library/llama3.2) is a 3B parameters, 2.0GB model.
+const DEFAULT_MODEL: &str = "llama3.2";
 
 impl Ollama {
     pub fn new<S: Into<String>>(
@@ -152,15 +152,15 @@ mod tests {
     #[tokio::test]
     #[ignore]
     async fn test_generate() {
-        let ollama = Ollama::default().with_model("llama3");
+        let ollama = Ollama::default().with_model("llama3.2");
         let response = ollama.invoke("Hey Macarena, ay").await.unwrap();
         println!("{}", response);
     }
 
     #[tokio::test]
     #[ignore]
     async fn test_stream() {
-        let ollama = Ollama::default().with_model("llama3");
+        let ollama = Ollama::default().with_model("llama3.2");
 
         let message = Message::new_human_message("Why does water boil at 100 degrees?");
         let mut stream = ollama.stream(&vec![message]).await.unwrap();
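Because DEFAULT_MODEL is a crate-internal constant, this change silently retargets every `Ollama::default()` call site; code that needs the previous model can pin it with `with_model`. A short sketch of both paths, assuming both models have been pulled locally:

```rust
use langchain_rust::{language_models::llm::LLM, llm::ollama::client::Ollama};

#[tokio::main]
async fn main() {
    // Picks up the new default, llama3.2 (3B parameters, ~2.0GB).
    let ollama = Ollama::default();
    println!("{}", ollama.invoke("Hi").await.unwrap());

    // Opting back into the previous default is explicit and local.
    let pinned = Ollama::default().with_model("llama3");
    println!("{}", pinned.invoke("Hi").await.unwrap());
}
```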
src/llm/ollama/openai.rs (2 changes: 1 addition & 1 deletion)

@@ -12,7 +12,7 @@ const OLLAMA_API_BASE: &str = "http://localhost:11434/v1";
 /// ## Example
 ///
 /// ```rs
-/// let ollama = OpenAI::new(OllamaConfig::default()).with_model("llama3");
+/// let ollama = OpenAI::new(OllamaConfig::default()).with_model("llama3.2");
 /// let response = ollama.invoke("Say hello!").await.unwrap();
 /// ```
 #[derive(Clone, Debug, Deserialize)]
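The doc-comment snippet above also runs as a standalone program against Ollama's OpenAI-compatible endpoint at OLLAMA_API_BASE. A sketch; the `use` paths are an assumption (the doc comment shows only the call sites), so adjust them to the crate's actual module layout:

```rust
// Assumed import paths; only the calls below appear in the doc comment.
use langchain_rust::{
    language_models::llm::LLM,
    llm::{ollama::openai::OllamaConfig, openai::OpenAI},
};

#[tokio::main]
async fn main() {
    // OllamaConfig::default() points the OpenAI-style client at the local
    // Ollama server's /v1 API (http://localhost:11434/v1, per OLLAMA_API_BASE).
    let ollama = OpenAI::new(OllamaConfig::default()).with_model("llama3.2");
    let response = ollama.invoke("Say hello!").await.unwrap();
    println!("{}", response);
}
```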