diff --git a/app/client/api.ts b/app/client/api.ts
index 56fa3299624..4b39fbfaed2 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -144,10 +144,10 @@ export function getHeaders() {
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
     "x-requested-with": "XMLHttpRequest",
-    "Accept": "application/json",
+    Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model === "gemini-pro";
+  const isGoogle = modelConfig.model.startsWith("gemini");
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
@@ -155,20 +155,23 @@
     : isAzure
     ? accessStore.azureApiKey
     : accessStore.openaiApiKey;
-
+  const clientConfig = getClientConfig();
   const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;
 
-  // use user's api key first
-  if (validString(apiKey)) {
-    headers[authHeader] = makeBearer(apiKey);
-  } else if (
-    accessStore.enabledAccessControl() &&
-    validString(accessStore.accessCode)
-  ) {
-    headers[authHeader] = makeBearer(
-      ACCESS_CODE_PREFIX + accessStore.accessCode,
-    );
+  // when using the google api in the app, do not set the auth header
+  if (!(isGoogle && clientConfig?.isApp)) {
+    // use user's api key first
+    if (validString(apiKey)) {
+      headers[authHeader] = makeBearer(apiKey);
+    } else if (
+      accessStore.enabledAccessControl() &&
+      validString(accessStore.accessCode)
+    ) {
+      headers[authHeader] = makeBearer(
+        ACCESS_CODE_PREFIX + accessStore.accessCode,
+      );
+    }
   }
 
   return headers;
diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index f0f63659f2b..6e335e7fd2f 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -1,15 +1,8 @@
 import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import Locale from "../../locales";
-import { getServerSideConfig } from "@/app/config/server";
-import de from "@/app/locales/de";
+import { DEFAULT_API_HOST } from "@/app/constant";
 export class GeminiProApi implements LLMApi {
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);
@@ -21,7 +14,7 @@ export class GeminiProApi implements LLMApi {
     );
   }
   async chat(options: ChatOptions): Promise<void> {
-    const apiClient = this;
+    // const apiClient = this;
     const messages = options.messages.map((v) => ({
       role: v.role.replace("assistant", "model").replace("system", "user"),
       parts: [{ text: v.content }],
@@ -79,20 +72,31 @@
       ],
     };
 
-    console.log("[Request] google payload: ", requestPayload);
+    const isApp = !!getClientConfig()?.isApp;
 
     const shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
+    const accessStore = useAccessStore.getState();
     try {
-      const chatPath = this.path(Google.ChatPath);
+      let chatPath = this.path(Google.ChatPath);
+
+      // let baseUrl = accessStore.googleUrl;
+
+      chatPath = isApp
+        ? DEFAULT_API_HOST +
+          "/api/proxy/google/" +
+          Google.ChatPath +
+          `?key=${accessStore.googleApiKey}`
+        : chatPath;
+
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
         signal: controller.signal,
         headers: getHeaders(),
       };
-
+      console.log("[Request] google chatPath: ", chatPath, isApp);
       // make a fetch request
       const requestTimeoutId = setTimeout(
         () => controller.abort(),
@@ -134,6 +138,8 @@
 
       // start animaion
       animateResponseText();
+
+      console.log("[Proxy Endpoint] ", streamChatPath);
       fetch(streamChatPath, chatPayload)
         .then((response) => {
           const reader = response?.body?.getReader();
@@ -187,9 +193,7 @@
       } else {
         const res = await fetch(chatPath, chatPayload);
         clearTimeout(requestTimeoutId);
-
         const resJson = await res.json();
-
         if (resJson?.promptFeedback?.blockReason) {
           // being blocked
           options.onError?.(
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 68a0fda755c..3c3a5180198 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,
@@ -45,7 +46,9 @@ export class ChatGPTApi implements LLMApi {
 
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }
 
     if (baseUrl.endsWith("/")) {
@@ -59,6 +62,8 @@
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }
 
+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }
diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx
index dff17e4abe3..c17ebc8d809 100644
--- a/app/components/exporter.tsx
+++ b/app/components/exporter.tsx
@@ -307,7 +307,7 @@ export function PreviewActions(props: {
     setShouldExport(false);
 
     var api: ClientApi;
-    if (config.modelConfig.model === "gemini-pro") {
+    if (config.modelConfig.model.startsWith("gemini")) {
       api = new ClientApi(ModelProvider.GeminiPro);
     } else {
       api = new ClientApi(ModelProvider.GPT);
diff --git a/app/components/home.tsx b/app/components/home.tsx
index 4be7da0fbda..8386ba144b9 100644
--- a/app/components/home.tsx
+++ b/app/components/home.tsx
@@ -171,7 +171,7 @@ export function useLoadData() {
   const config = useAppConfig();
 
   var api: ClientApi;
-  if (config.modelConfig.model === "gemini-pro") {
+  if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
   } else {
     api = new ClientApi(ModelProvider.GPT);
diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx
index b9f8116747e..e46a018f463 100644
--- a/app/components/model-config.tsx
+++ b/app/components/model-config.tsx
@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
         >
-      {props.modelConfig.model === "gemini-pro" ? null : (
+      {props.modelConfig.model.startsWith("gemini") ? null : (
         <>
diff --git a/app/constant.ts b/app/constant.ts
--- a/app/constant.ts
+++ b/app/constant.ts
@@ ... @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-1106-preview": "2023-04",
   "gpt-4-0125-preview": "2023-04",
   "gpt-4-vision-preview": "2023-04",
-  // After improvements,
+  // After improvements,
   // it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
"gemini-pro": "2023-12", }; diff --git a/app/store/access.ts b/app/store/access.ts index 9e8024a6aa8..6884e71e3bf 100644 --- a/app/store/access.ts +++ b/app/store/access.ts @@ -12,7 +12,9 @@ import { ensure } from "../utils/clone"; let fetchState = 0; // 0 not fetch, 1 fetching, 2 done const DEFAULT_OPENAI_URL = - getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI; + getClientConfig()?.buildMode === "export" + ? DEFAULT_API_HOST + "/api/proxy/openai" + : ApiPath.OpenAI; const DEFAULT_ACCESS_STATE = { accessCode: "", diff --git a/app/store/chat.ts b/app/store/chat.ts index 254325a7552..037a6c96050 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -316,7 +316,7 @@ export const useChatStore = createPersistStore( }); var api: ClientApi; - if (modelConfig.model === "gemini-pro") { + if (modelConfig.model.startsWith("gemini")) { api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); @@ -501,7 +501,7 @@ export const useChatStore = createPersistStore( const modelConfig = session.mask.modelConfig; var api: ClientApi; - if (modelConfig.model === "gemini-pro") { + if (modelConfig.model.startsWith("gemini")) { api = new ClientApi(ModelProvider.GeminiPro); } else { api = new ClientApi(ModelProvider.GPT); diff --git a/app/utils/cors.ts b/app/utils/cors.ts index 773f152aafa..20b3e516017 100644 --- a/app/utils/cors.ts +++ b/app/utils/cors.ts @@ -1,8 +1,8 @@ import { getClientConfig } from "../config/client"; -import { ApiPath, DEFAULT_CORS_HOST } from "../constant"; +import { ApiPath, DEFAULT_API_HOST } from "../constant"; export function corsPath(path: string) { - const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : ""; + const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : ""; if (!path.startsWith("/")) { path = "/" + path; diff --git a/next.config.mjs b/next.config.mjs index 4faa63e5450..ae94f489545 100644 --- a/next.config.mjs +++ b/next.config.mjs @@ -64,8 +64,17 @@ if (mode !== "export") { nextConfig.rewrites = async () => { const ret = [ + // adjust for previous verison directly using "/api/proxy/" as proxy base route { - source: "/api/proxy/:path*", + source: "/api/proxy/v1/:path*", + destination: "https://api.openai.com/v1/:path*", + }, + { + source: "/api/proxy/google/:path*", + destination: "https://generativelanguage.googleapis.com/:path*", + }, + { + source: "/api/proxy/openai/:path*", destination: "https://api.openai.com/:path*", }, { diff --git a/package.json b/package.json index f28a5a6ecf2..b31d6a901a0 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "chatgpt-next-web", + "name": "nextchat", "private": false, "license": "mit", "scripts": { @@ -64,4 +64,4 @@ "resolutions": { "lint-staged/yaml": "^2.2.2" } -} +} \ No newline at end of file diff --git a/scripts/setup.sh b/scripts/setup.sh index 73ed61b1326..50488f963bc 100644 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! 
 fi
 
 # Clone the repository and install dependencies
-git clone https://github.com/Yidadaa/ChatGPT-Next-Web
+git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
 cd ChatGPT-Next-Web
 yarn install
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index d93210fc540..eeda9dd8c73 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -431,17 +431,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
-[[package]]
-name = "chatgpt-next-web"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "tauri",
- "tauri-build",
- "tauri-plugin-window-state",
-]
-
 [[package]]
 name = "chrono"
 version = "0.4.24"
@@ -1824,6 +1813,17 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
 
+[[package]]
+name = "nextchat"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-build",
+ "tauri-plugin-window-state",
+]
+
 [[package]]
 name = "nix"
 version = "0.26.4"
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index fee1c860fb9..9c3aef24495 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -1,11 +1,11 @@
 [package]
-name = "chatgpt-next-web"
+name = "nextchat"
 version = "0.1.0"
 description = "A cross platform app for LLM ChatBot."
 authors = ["Yidadaa"]
 license = "mit"
 repository = ""
-default-run = "chatgpt-next-web"
+default-run = "nextchat"
 edition = "2021"
 rust-version = "1.60"
 
@@ -17,11 +17,29 @@ tauri-build = { version = "1.3.0", features = [] }
 [dependencies]
 serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
-tauri = { version = "1.3.0", features = ["notification-all", "fs-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] }
+tauri = { version = "1.3.0", features = [
+    "notification-all",
+    "fs-all",
+    "clipboard-all",
+    "dialog-all",
+    "shell-open",
+    "updater",
+    "window-close",
+    "window-hide",
+    "window-maximize",
+    "window-minimize",
+    "window-set-icon",
+    "window-set-ignore-cursor-events",
+    "window-set-resizable",
+    "window-show",
+    "window-start-dragging",
+    "window-unmaximize",
+    "window-unminimize",
+] }
 tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
 
 [features]
 # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
 # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
 # DO NOT REMOVE!!
-custom-protocol = [ "tauri/custom-protocol" ]
+custom-protocol = ["tauri/custom-protocol"]
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index dac745f928c..86bc92ee185 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -91,7 +91,7 @@
     "updater": {
       "active": true,
       "endpoints": [
-        "https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json"
+        "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
       ],
       "dialog": false,
       "windows": {