From ab7f38605f011f82fd3d7bcdef3dab9d07a03a30 Mon Sep 17 00:00:00 2001
From: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date: Wed, 7 Feb 2024 13:17:11 +0800
Subject: [PATCH] fix: fix gemini issue when using app (#4013)

* chore: update path

* fix: fix google auth logic

* fix: not using header authorization for google api

* chore: revert to allow stream
---
 app/client/api.ts              | 29 ++++++++++++++++-------------
 app/client/platforms/google.ts | 34 ++++++++++++++++++++--------------
 app/client/platforms/openai.ts |  7 ++++++-
 app/components/exporter.tsx    |  2 +-
 app/components/home.tsx        |  2 +-
 app/store/access.ts            |  4 +++-
 app/store/chat.ts              |  6 +++---
 app/utils/cors.ts              |  4 ++--
 next.config.mjs                | 11 ++++++++++-
 package.json                   |  4 ++--
 scripts/setup.sh               |  2 +-
 src-tauri/Cargo.lock           | 22 +++++++++++-----------
 src-tauri/Cargo.toml           | 26 ++++++++++++++++++++++----
 src-tauri/tauri.conf.json      |  2 +-
 14 files changed, 99 insertions(+), 56 deletions(-)

diff --git a/app/client/api.ts b/app/client/api.ts
index d82a71a3a93..4da2a52c3f4 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -146,10 +146,10 @@ export function getHeaders() {
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
     "x-requested-with": "XMLHttpRequest",
-    "Accept": "application/json",
+    Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model === "gemini-pro";
+  const isGoogle = modelConfig.model.startsWith("gemini");
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
@@ -157,20 +157,23 @@
     : isAzure
     ? accessStore.azureApiKey
     : accessStore.openaiApiKey;
-
+  const clientConfig = getClientConfig();
   const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;

-  // use user's api key first
-  if (validString(apiKey)) {
-    headers[authHeader] = makeBearer(apiKey);
-  } else if (
-    accessStore.enabledAccessControl() &&
-    validString(accessStore.accessCode)
-  ) {
-    headers[authHeader] = makeBearer(
-      ACCESS_CODE_PREFIX + accessStore.accessCode,
-    );
+  // when using the Google API in the app, don't set the auth header
+  if (!(isGoogle && clientConfig?.isApp)) {
+    // use user's api key first
+    if (validString(apiKey)) {
+      headers[authHeader] = makeBearer(apiKey);
+    } else if (
+      accessStore.enabledAccessControl() &&
+      validString(accessStore.accessCode)
+    ) {
+      headers[authHeader] = makeBearer(
+        ACCESS_CODE_PREFIX + accessStore.accessCode,
+      );
+    }
   }

   return headers;
diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 1141058f86b..70fdcfb2f23 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -7,11 +7,6 @@ import { DEFAULT_API_HOST, DEFAULT_CORS_HOST, GEMINI_BASE_URL, Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import Locale from "../../locales";
 import { getServerSideConfig } from "@/app/config/server";
@@ -106,10 +101,10 @@ export class GeminiProApi implements LLMApi {
   * @returns {Promise<void>} A promise that resolves when the chat request is complete.
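   * @example
   * // A hypothetical usage sketch: the caller, message shape, and handlers below
   * // are illustrative assumptions, not part of this patch.
   * //   const api = new GeminiProApi();
   * //   await api.chat({
   * //     messages: [{ role: "user", content: "Hello" }],
   * //     config: { model: "gemini-pro", stream: false },
   * //     onFinish: (reply) => console.log(reply),
   * //     onError: (err) => console.error(err),
   * //   });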
   */
  async chat(options: ChatOptions): Promise<void> {
-    const provider = getProviderFromState();
-    const cfgspeed_animation = useAppConfig.getState().speed_animation; // Get the animation speed from the app config
-    const apiClient = this;
-    const messages: Message[] = options.messages.map((v) => ({
+    const provider = getProviderFromState();
+    const cfgspeed_animation = useAppConfig.getState().speed_animation; // Get the animation speed from the app config
+    // const apiClient = this;
+    const messages = options.messages.map((v) => ({
       role: v.role.replace("assistant", "model").replace("system", "user"),
       parts: [{ text: v.content }],
     }));
@@ -173,20 +168,31 @@ export class GeminiProApi implements LLMApi {
       ],
     };
     console.log(`[Request] [${provider}] payload: `, requestPayload);
+    const isApp = !!getClientConfig()?.isApp;

     const shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
-
+    const accessStore = useAccessStore.getState();
     try {
-      const chatPath = this.path(Google.ChatPath);
+      let chatPath = this.path(Google.ChatPath);
+
+      // let baseUrl = accessStore.googleUrl;
+
+      chatPath = isApp
+        ? DEFAULT_API_HOST +
+          "/api/proxy/google/" +
+          Google.ChatPath +
+          `?key=${accessStore.googleApiKey}`
+        : chatPath;
+
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
         signal: controller.signal,
         headers: getHeaders(),
       };
-
+      console.log("[Request] google chatPath: ", chatPath, isApp);
       // make a fetch request
       const requestTimeoutId = setTimeout(
         () => controller.abort(),
@@ -229,6 +235,8 @@
         // start animation
         animateResponseText();
+
+        console.log("[Proxy Endpoint] ", streamChatPath);
       fetch(streamChatPath, chatPayload)
         .then((response) => {
           const reader = response?.body?.getReader();
@@ -284,9 +292,7 @@
       } else {
         const res = await fetch(chatPath, chatPayload);
         clearTimeout(requestTimeoutId);
-
         const resJson = await res.json();
-
         if (resJson?.promptFeedback?.blockReason) {
           // being blocked
           options.onError?.(
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 2b732c712ef..9c93cd95d63 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,
@@ -51,7 +52,9 @@ export class ChatGPTApi implements LLMApi {

     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }

     if (baseUrl.endsWith("/")) {
@@ -65,6 +68,8 @@
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }

+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }
diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx
index bdb45ef4a6e..cbead2b82f4 100644
--- a/app/components/exporter.tsx
+++ b/app/components/exporter.tsx
@@ -336,7 +336,7 @@ export function PreviewActions(props: {
     setShouldExport(false);

     var api: ClientApi;
-    if (config.modelConfig.model === "gemini-pro") {
+    if (config.modelConfig.model.startsWith("gemini")) {
       api = new ClientApi(ModelProvider.GeminiPro);
     } else {
       api = new ClientApi(ModelProvider.GPT);
diff --git a/app/components/home.tsx b/app/components/home.tsx
index 11a82fbcb3d..696af91b0c3 100644
--- a/app/components/home.tsx
+++ b/app/components/home.tsx
@@ -181,7 +181,7 @@ export function useLoadData() {
   const config = useAppConfig();

   var api: ClientApi;
-  if (config.modelConfig.model === "gemini-pro") {
+  if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
   } else {
     api = new ClientApi(ModelProvider.GPT);
diff --git a/app/store/access.ts b/app/store/access.ts
index 9e8024a6aa8..6884e71e3bf 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
 let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

 const DEFAULT_OPENAI_URL =
-  getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
+  getClientConfig()?.buildMode === "export"
+    ? DEFAULT_API_HOST + "/api/proxy/openai"
+    : ApiPath.OpenAI;

 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
diff --git a/app/store/chat.ts b/app/store/chat.ts
index c8d630dfb2b..10a2b978d9a 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -314,8 +314,8 @@ export const useChatStore = createPersistStore(

       // use 'let' rather than 'var' since 'api' is reassigned conditionally
       let api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);
@@ -505,7 +505,7 @@
       // use 'let' rather than 'var' since 'api' is reassigned conditionally
       let api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);
diff --git a/app/utils/cors.ts b/app/utils/cors.ts
index 773f152aafa..20b3e516017 100644
--- a/app/utils/cors.ts
+++ b/app/utils/cors.ts
@@ -1,8 +1,8 @@
 import { getClientConfig } from "../config/client";
-import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
+import { ApiPath, DEFAULT_API_HOST } from "../constant";

 export function corsPath(path: string) {
-  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
+  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

   if (!path.startsWith("/")) {
     path = "/" + path;
diff --git a/next.config.mjs b/next.config.mjs
index 06a57a54b86..cd468f55001 100644
--- a/next.config.mjs
+++ b/next.config.mjs
@@ -64,8 +64,17 @@ if (mode !== "export") {

   nextConfig.rewrites = async () => {
     const ret = [
+      // adjust for previous versions that used "/api/proxy/" directly as the proxy base route
       {
-        source: "/api/proxy/:path*",
+        source: "/api/proxy/v1/:path*",
+        destination: "https://api.openai.com/v1/:path*",
+      },
+      {
+        source: "/api/proxy/google/:path*",
+        destination: "https://generativelanguage.googleapis.com/:path*",
+      },
+      {
+        source: "/api/proxy/openai/:path*",
         destination: "https://api.openai.com/:path*",
       },
       {
diff --git a/package.json b/package.json
index 283db731041..0bd74ff314d 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
 {
-  "name": "chatgpt-next-web",
+  "name": "nextchat",
   "private": false,
   "license": "mit",
   "scripts": {
@@ -66,4 +66,4 @@
   "resolutions": {
     "lint-staged/yaml": "^2.2.2"
   }
-}
+}
\ No newline at end of file
diff --git a/scripts/setup.sh b/scripts/setup.sh
index 73ed61b1326..50488f963bc 100644
--- a/scripts/setup.sh
+++ b/scripts/setup.sh
@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
 fi

 # Clone the repository and install dependencies
-git clone https://github.com/Yidadaa/ChatGPT-Next-Web
+git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
 cd ChatGPT-Next-Web
 yarn install
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index f742469961e..6c223d35676 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -431,17 +431,6 @@
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

-[[package]]
-name = "chatgpt-next-web"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "tauri",
- "tauri-build",
- "tauri-plugin-window-state",
-]
-
 [[package]]
 name = "chrono"
 version = "0.4.24"
@@ -1815,6 +1804,17 @@
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"

+[[package]]
+name = "nextchat"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-build",
+ "tauri-plugin-window-state",
+]
+
 [[package]]
 name = "nix"
 version = "0.26.4"
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index a004f235199..9c3aef24495 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -1,11 +1,11 @@
 [package]
-name = "chatgpt-next-web"
+name = "nextchat"
 version = "0.1.0"
 description = "A cross platform app for LLM ChatBot."
authors = ["Yidadaa"] license = "mit" repository = "" -default-run = "chatgpt-next-web" +default-run = "nextchat" edition = "2021" rust-version = "1.60" @@ -17,11 +17,29 @@ tauri-build = { version = "1.3.0", features = [] } [dependencies] serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } -tauri = { version = "1.3.0", features = [ "window-set-always-on-top", "process-relaunch", "fs-all", "notification-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] } +tauri = { version = "1.3.0", features = [ + "notification-all", + "fs-all", + "clipboard-all", + "dialog-all", + "shell-open", + "updater", + "window-close", + "window-hide", + "window-maximize", + "window-minimize", + "window-set-icon", + "window-set-ignore-cursor-events", + "window-set-resizable", + "window-show", + "window-start-dragging", + "window-unmaximize", + "window-unminimize", +] } tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" } [features] # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled. # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes. # DO NOT REMOVE!! -custom-protocol = [ "tauri/custom-protocol" ] +custom-protocol = ["tauri/custom-protocol"] diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 0681bcc5866..ab751924bda 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -95,7 +95,7 @@ "updater": { "active": true, "endpoints": [ - "https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json" + "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json" ], "dialog": true, "windows": {