Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: migrate cloudflare client to openai-compatible #821

Merged
merged 1 commit into from
Sep 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 18 additions & 20 deletions Argcfile.sh
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ test-server() {
OPENAI_COMPATIBLE_PLATFORMS=( \
openai,gpt-4o-mini,https://api.openai.com/v1 \
ai21,jamba-1.5-mini,https://api.ai21.com/studio/v1 \
cloudflare,@cf/meta/llama-3.1-8b-instruct, \
deepinfra,meta-llama/Meta-Llama-3.1-8B-Instruct,https://api.deepinfra.com/v1/openai \
deepseek,deepseek-chat,https://api.deepseek.com \
fireworks,accounts/fireworks/models/llama-v3p1-8b-instruct,https://api.fireworks.ai/inference/v1 \
Expand Down Expand Up @@ -111,7 +112,7 @@ chat() {
fi
for platform_config in "${OPENAI_COMPATIBLE_PLATFORMS[@]}"; do
if [[ "$argc_platform" == "${platform_config%%,*}" ]]; then
api_base="${platform_config##*,}"
_retrieve_api_base
break
fi
done
Expand Down Expand Up @@ -141,15 +142,15 @@ chat() {
models() {
for platform_config in "${OPENAI_COMPATIBLE_PLATFORMS[@]}"; do
if [[ "$argc_platform" == "${platform_config%%,*}" ]]; then
api_base="${platform_config##*,}"
_retrieve_api_base
break
fi
done
if [[ -n "$api_base" ]]; then
env_prefix="$(echo "$argc_platform" | tr '[:lower:]' '[:upper:]')"
api_key_env="${env_prefix}_API_KEY"
api_key="${!api_key_env}"
_openai_models
_retrieve_models
else
argc models-$argc_platform
fi
Expand All @@ -173,7 +174,7 @@ chat-openai-compatible() {
# @option --api-base! $$
# @option --api-key! $$
models-openai-compatible() {
_openai_models
_retrieve_models
}

# @cmd Chat with azure-openai api
Expand Down Expand Up @@ -271,19 +272,6 @@ chat-vertexai() {
-d "$(_build_body vertexai "$@")"
}

# @cmd Chat with cloudflare api
# @env CLOUDFLARE_API_KEY!
# @option -m --model=@cf/meta/llama-3-8b-instruct $CLOUDFLARE_MODEL
# @flag -S --no-stream
# @arg text~
chat-cloudflare() {
    # Workers AI run endpoint is scoped to the caller's account id;
    # CLOUDFLARE_ACCOUNT_ID is read from the environment (not declared via @env).
    url="https://api.cloudflare.com/client/v4/accounts/$CLOUDFLARE_ACCOUNT_ID/ai/run/$argc_model"
    # POST the request body built for the cloudflare payload shape;
    # -i keeps response headers visible for debugging.
    _wrapper curl -i \
        -X POST \
        -H "Authorization: Bearer $CLOUDFLARE_API_KEY" \
        -d "$(_build_body cloudflare "$@")" \
        "$url"
}

# @cmd Chat with replicate api
# @env REPLICATE_API_KEY!
# @option -m --model=meta/meta-llama-3-8b-instruct $REPLICATE_MODEL
Expand Down Expand Up @@ -336,22 +324,32 @@ chat-ernie() {
-d "$(_build_body ernie "$@")"
}


_argc_before() {
    # Hook argc runs before every command: translate the --no-stream flag
    # into the `stream` variable consumed when building request bodies.
    if [[ -n "$argc_no_stream" ]]; then
        stream="false"
    else
        stream="true"
    fi
}

_openai_models() {
_retrieve_models() {
    # List models via the OpenAI-compatible /models endpoint.
    # api_base/api_key may be preset by the caller (platform dispatch);
    # otherwise fall back to the --api-base/--api-key options.
    api_base="${api_base:-"$argc_api_base"}"
    api_key="${api_key:-"$argc_api_key"}"
    # Fix: the original ended the curl command with a dangling `\` line
    # continuation followed by a blank line — harmless today, but it silently
    # swallows the newline and breaks as soon as a line is appended below.
    _wrapper curl "$api_base/models" \
        -H "Authorization: Bearer $api_key"
}

_retrieve_api_base() {
    # Resolve api_base for the current platform.
    # platform_config is "name,model,api_base"; the part after the last comma
    # may be empty (e.g. cloudflare), in which case fall back to the
    # <PLATFORM>_API_BASE environment variable.
    api_base="${platform_config##*,}"
    if [[ -z "$api_base" ]]; then
        # Quote $argc_platform to avoid word-splitting/globbing (fix).
        key="$(echo "$argc_platform" | tr '[:lower:]' '[:upper:]')_API_BASE"
        # ${!key}: indirect expansion — read the env var named by $key.
        api_base="${!key}"
        if [[ -z "$api_base" ]]; then
            # Fix: message read "Miss api_base" — corrected grammar.
            _die "Missing api_base for $argc_platform; please set $key"
        fi
    fi
}

_choice_model() {
aichat --list-models
}
Expand Down Expand Up @@ -436,7 +434,7 @@ _build_body() {
"safetySettings":[{"category":"HARM_CATEGORY_HARASSMENT","threshold":"BLOCK_ONLY_HIGH"},{"category":"HARM_CATEGORY_HATE_SPEECH","threshold":"BLOCK_ONLY_HIGH"},{"category":"HARM_CATEGORY_SEXUALLY_EXPLICIT","threshold":"BLOCK_ONLY_HIGH"},{"category":"HARM_CATEGORY_DANGEROUS_CONTENT","threshold":"BLOCK_ONLY_HIGH"}]
}'
;;
ernie|cloudflare)
ernie)
echo '{
"messages": [
{
Expand Down
6 changes: 3 additions & 3 deletions config.example.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -223,10 +223,10 @@ clients:
region: xxx

# See https://developers.cloudflare.com/workers-ai/
- type: cloudflare
account_id: xxx
- type: openai-compatible
name: cloudflare
api_base: https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/v1
api_key: xxx
api_base: https://api.cloudflare.com/client/v4 # Optional

# See https://replicate.com/docs
- type: replicate
Expand Down
1 change: 1 addition & 0 deletions models.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -520,6 +520,7 @@

# Links:
# - https://developers.cloudflare.com/workers-ai/models/
# - https://developers.cloudflare.com/workers-ai/configuration/open-ai-compatibility/
- platform: cloudflare
models:
- name: '@cf/meta/llama-3.1-8b-instruct'
Expand Down
181 changes: 0 additions & 181 deletions src/client/cloudflare.rs

This file was deleted.

17 changes: 10 additions & 7 deletions src/client/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -364,29 +364,32 @@ pub fn create_config(prompts: &[PromptAction], client: &str) -> Result<(String,

pub fn create_openai_compatible_client_config(client: &str) -> Result<Option<(String, Value)>> {
match super::OPENAI_COMPATIBLE_PLATFORMS
.iter()
.into_iter()
.find(|(name, _)| client == *name)
{
None => Ok(None),
Some((name, api_base)) => {
let mut config = json!({
"type": OpenAICompatibleClient::NAME,
"name": name,
"api_base": api_base,
});
let prompts = if ALL_MODELS.iter().any(|v| &v.platform == name) {
vec![("api_key", "API Key:", false, PromptKind::String)]
let mut prompts = vec![];
if api_base.is_empty() {
prompts.push(("api_base", "API Base:", true, PromptKind::String));
} else {
vec![
("api_key", "API Key:", false, PromptKind::String),
config["api_base"] = api_base.into();
}
prompts.push(("api_key", "API Key:", false, PromptKind::String));
if !ALL_MODELS.iter().any(|v| v.platform == name) {
prompts.extend([
("models[].name", "Model Name:", true, PromptKind::String),
(
"models[].max_input_tokens",
"Max Input Tokens:",
false,
PromptKind::Integer,
),
]
]);
};
let mut model = client.to_string();
set_client_config(&prompts, &mut model, &mut config)?;
Expand Down
4 changes: 2 additions & 2 deletions src/client/gemini.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ impl_client_trait!(
gemini_chat_completions,
gemini_chat_completions_streaming
),
(prepare_embeddings, gemini_embeddings),
(prepare_embeddings, embeddings),
(noop_prepare_rerank, noop_rerank),
);

Expand Down Expand Up @@ -95,7 +95,7 @@ fn prepare_embeddings(self_: &GeminiClient, data: EmbeddingsData) -> Result<Requ
Ok(request_data)
}

async fn gemini_embeddings(builder: RequestBuilder, _model: &Model) -> Result<EmbeddingsOutput> {
async fn embeddings(builder: RequestBuilder, _model: &Model) -> Result<EmbeddingsOutput> {
let res = builder.send().await?;
let status = res.status();
let data: Value = res.json().await?;
Expand Down
4 changes: 2 additions & 2 deletions src/client/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,13 @@ register_client!(
),
(vertexai, "vertexai", VertexAIConfig, VertexAIClient),
(bedrock, "bedrock", BedrockConfig, BedrockClient),
(cloudflare, "cloudflare", CloudflareConfig, CloudflareClient),
(replicate, "replicate", ReplicateConfig, ReplicateClient),
(ernie, "ernie", ErnieConfig, ErnieClient),
);

pub const OPENAI_COMPATIBLE_PLATFORMS: [(&str, &str); 18] = [
pub const OPENAI_COMPATIBLE_PLATFORMS: [(&str, &str); 19] = [
("ai21", "https://api.ai21.com/studio/v1"),
("cloudflare", ""),
("deepinfra", "https://api.deepinfra.com/v1/openai"),
("deepseek", "https://api.deepseek.com"),
("fireworks", "https://api.fireworks.ai/inference/v1"),
Expand Down