This repository has been archived by the owner on Sep 15, 2024. It is now read-only.

[BOT] [Cherry Pick] Head Repo without stupid conflict #252

Merged 7 commits on Feb 7, 2024
29 changes: 16 additions & 13 deletions app/client/api.ts
@@ -146,31 +146,34 @@ export function getHeaders() {
const headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
"Accept": "application/json",
Accept: "application/json",
};
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini-pro";
const isGoogle = modelConfig.model.startsWith("gemini");
const isAzure = accessStore.provider === ServiceProvider.Azure;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isGoogle
? accessStore.googleApiKey
: isAzure
? accessStore.azureApiKey
: accessStore.openaiApiKey;

const clientConfig = getClientConfig();
const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
const validString = (x: string) => x && x.length > 0;

// use user's api key first
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers[authHeader] = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
// when using the Google API in the app, do not set the auth header
if (!(isGoogle && clientConfig?.isApp)) {
// use user's api key first
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers[authHeader] = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
}

return headers;
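Taken together, the api.ts changes do two things: any model name beginning with "gemini" is now treated as a Google model (instead of the exact string "gemini-pro"), and when the Gemini API is called from the packaged app no auth header is set, since the key travels as a URL query parameter (see the google.ts hunk below). A minimal sketch of the resulting header logic, with the stores reduced to plain parameters (names here are illustrative, not the real module):

```ts
// Minimal sketch only: store objects are reduced to plain parameters.
interface AccessLike {
  provider: "OpenAI" | "Azure";
  googleApiKey: string;
  azureApiKey: string;
  openaiApiKey: string;
}

function buildAuthHeaders(model: string, access: AccessLike, isApp: boolean) {
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };
  const isGoogle = model.startsWith("gemini"); // was: model === "gemini-pro"
  const isAzure = access.provider === "Azure";
  const authHeader = isAzure ? "api-key" : "Authorization";
  const apiKey = isGoogle
    ? access.googleApiKey
    : isAzure
      ? access.azureApiKey
      : access.openaiApiKey;

  // In the packaged app, Gemini calls carry the key as a URL parameter,
  // so no auth header is attached in that case.
  if (!(isGoogle && isApp) && apiKey.trim().length > 0) {
    headers[authHeader] = isAzure ? apiKey.trim() : `Bearer ${apiKey.trim()}`;
  }
  return headers;
}
```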
44 changes: 26 additions & 18 deletions app/client/platforms/google.ts
@@ -7,11 +7,6 @@
import { DEFAULT_API_HOST, DEFAULT_CORS_HOST, GEMINI_BASE_URL, Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";
@@ -108,7 +103,7 @@ export class GeminiProApi implements LLMApi {
async chat(options: ChatOptions): Promise<void> {
const provider = getProviderFromState();
const cfgspeed_animation = useAppConfig.getState().speed_animation; // Get the animation speed from the app config
const apiClient = this;
// const apiClient = this;
const messages: Message[] = options.messages.map((v) => ({
role: v.role.replace("assistant", "model").replace("system", "user"),
parts: [{ text: v.content }],
@@ -172,14 +167,29 @@
},
],
};
console.log(`[Request] [${provider}] payload: `, requestPayload);

const shouldStream = !!options.config.stream;
console.log(`[Request] [${provider}] payload: `, requestPayload); // re-added to make request debugging easier during development
const accessStore = useAccessStore.getState();
let baseUrl = accessStore.googleUrl;
const isApp = !!getClientConfig()?.isApp;

let shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);

try {
const chatPath = this.path(Google.ChatPath);
let chatPath = this.path(Google.ChatPath);

// let baseUrl = accessStore.googleUrl;

if (!baseUrl) {
baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath
: chatPath;
}

if (isApp) {
baseUrl += `?key=${accessStore.googleApiKey}`;
}
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
@@ -195,10 +205,6 @@
if (shouldStream) {
let responseText = "";
let remainText = "";
let streamChatPath = chatPath.replace(
"generateContent",
"streamGenerateContent",
);
let finished = false;

let existingTexts: string[] = [];
@@ -229,7 +235,11 @@

// start animation
animateResponseText();
fetch(streamChatPath, chatPayload)

fetch(
baseUrl.replace("generateContent", "streamGenerateContent"),
chatPayload,
)
.then((response) => {
const reader = response?.body?.getReader();
const decoder = new TextDecoder();
@@ -282,11 +292,9 @@
console.error("Error:", error);
});
} else {
const res = await fetch(chatPath, chatPayload);
const res = await fetch(baseUrl, chatPayload);
clearTimeout(requestTimeoutId);

const resJson = await res.json();

if (resJson?.promptFeedback?.blockReason) {
// being blocked
options.onError?.(
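The net effect in google.ts: if no custom googleUrl is configured, the app build routes requests through DEFAULT_API_HOST + "/api/proxy/google/" and appends the API key as ?key=…, while web builds keep using the relative chat path; streaming then just rewrites generateContent to streamGenerateContent on the same URL. A rough, slightly simplified sketch under those assumptions (the helper function itself is hypothetical, not the actual class method):

```ts
// Hypothetical helper mirroring the URL selection in this hunk.
function resolveGeminiUrl(opts: {
  googleUrl: string;       // accessStore.googleUrl; empty string when unset
  googleApiKey: string;    // accessStore.googleApiKey
  isApp: boolean;          // !!getClientConfig()?.isApp
  chatPath: string;        // this.path(Google.ChatPath); the diff uses Google.ChatPath directly in the app branch
  defaultApiHost: string;  // DEFAULT_API_HOST
  stream: boolean;         // options.config.stream
}): string {
  let baseUrl = opts.googleUrl;
  if (!baseUrl) {
    baseUrl = opts.isApp
      ? opts.defaultApiHost + "/api/proxy/google/" + opts.chatPath
      : opts.chatPath;
  }
  if (opts.isApp) {
    baseUrl += `?key=${opts.googleApiKey}`; // the app authenticates Gemini via the query string
  }
  // streaming swaps the endpoint on the same URL
  return opts.stream
    ? baseUrl.replace("generateContent", "streamGenerateContent")
    : baseUrl;
}
```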
7 changes: 6 additions & 1 deletion app/client/platforms/openai.ts
@@ -1,3 +1,4 @@
"use client";
import {
ApiPath,
DEFAULT_API_HOST,
@@ -51,7 +52,9 @@ export class ChatGPTApi implements LLMApi {

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
baseUrl = isApp
? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
: ApiPath.OpenAI;
}

if (baseUrl.endsWith("/")) {
@@ -65,6 +68,8 @@
path = makeAzurePath(path, accessStore.azureApiVersion);
}

console.log("[Proxy Endpoint] ", baseUrl, path);

return [baseUrl, path].join("/");
}

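openai.ts follows the same pattern: with no endpoint configured, the packaged app now falls back to DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI instead of the bare default host. A small sketch of that fallback (the constant values below are placeholders, not the real ones):

```ts
// Sketch of the new baseUrl fallback in ChatGPTApi.path(); literal values are assumptions.
const DEFAULT_API_HOST = "https://example-default-host"; // placeholder, not the real constant
const OPENAI_API_PATH = "/api/openai";                    // assumed value of ApiPath.OpenAI

function resolveOpenAiBaseUrl(configuredUrl: string, isApp: boolean): string {
  let baseUrl = configuredUrl;
  if (baseUrl.length === 0) {
    baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + OPENAI_API_PATH : OPENAI_API_PATH;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1); // mirrors the existing trailing-slash cleanup
  }
  return baseUrl;
}
```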
2 changes: 1 addition & 1 deletion app/components/exporter.tsx
@@ -336,7 +336,7 @@ export function PreviewActions(props: {
setShouldExport(false);

var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
if (config.modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
2 changes: 1 addition & 1 deletion app/components/home.tsx
@@ -181,7 +181,7 @@ export function useLoadData() {
const config = useAppConfig();

var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
if (config.modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
4 changes: 3 additions & 1 deletion app/store/access.ts
@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

const DEFAULT_OPENAI_URL =
getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
getClientConfig()?.buildMode === "export"
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;

const DEFAULT_ACCESS_STATE = {
accessCode: "",
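access.ts applies the matching default for static export builds: DEFAULT_OPENAI_URL now points at the bundled /api/proxy/openai rewrite rather than the bare host. A one-function sketch (host and path values are assumptions):

```ts
// Sketch of the new DEFAULT_OPENAI_URL selection; literal values are illustrative only.
function defaultOpenAiUrl(buildMode: string | undefined, defaultApiHost: string): string {
  return buildMode === "export"
    ? defaultApiHost + "/api/proxy/openai" // export builds go through the bundled proxy route
    : "/api/openai";                       // assumed value of ApiPath.OpenAI
}
```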
6 changes: 3 additions & 3 deletions app/store/chat.ts
@@ -314,8 +314,8 @@ export const useChatStore = createPersistStore(

// Changed 'var' to 'let' since 'api' is reassigned conditionally
// Note: keep type safety by using 'let' instead of 'var'; this is TypeScript, not plain JavaScript
let api: ClientApi;
if (modelConfig.model === "gemini-pro") {
let api: ClientApi; // use 'let' rather than 'var' here
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
@@ -505,7 +505,7 @@ export const useChatStore = createPersistStore(
// Changed 'var' to 'let' since 'api' is reassigned conditionally
// Note: keep type safety by using 'let' instead of 'var'; this is TypeScript, not plain JavaScript
let api: ClientApi;
if (modelConfig.model === "gemini-pro") {
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
4 changes: 2 additions & 2 deletions app/utils/cors.ts
@@ -1,8 +1,8 @@
import { getClientConfig } from "../config/client";
import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
import { ApiPath, DEFAULT_API_HOST } from "../constant";

export function corsPath(path: string) {
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

if (!path.startsWith("/")) {
path = "/" + path;
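cors.ts only swaps the constant that supplies the prefix for app builds (DEFAULT_CORS_HOST → DEFAULT_API_HOST); the path normalisation is unchanged. A sketch of the visible part of corsPath() (the host value is a placeholder, and the tail of the function is elided in the diff):

```ts
// Sketch of corsPath() after the change; only the portion shown in the hunk is reproduced.
const DEFAULT_API_HOST = "https://example-default-host"; // placeholder for the real constant

function corsPath(path: string, isApp: boolean): string {
  const baseUrl = isApp ? DEFAULT_API_HOST : "";
  if (!path.startsWith("/")) {
    path = "/" + path;
  }
  // ...the remainder of the original function is not visible in this hunk
  return `${baseUrl}${path}`;
}
```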
11 changes: 10 additions & 1 deletion next.config.mjs
@@ -64,8 +64,17 @@ if (mode !== "export") {

nextConfig.rewrites = async () => {
const ret = [
// keep compatibility with previous versions that used "/api/proxy/" directly as the proxy base route
{
source: "/api/proxy/:path*",
source: "/api/proxy/v1/:path*",
destination: "https://api.openai.com/v1/:path*",
},
{
source: "/api/proxy/google/:path*",
destination: "https://generativelanguage.googleapis.com/:path*",
},
{
source: "/api/proxy/openai/:path*",
destination: "https://api.openai.com/:path*",
},
{
Expand Down
4 changes: 2 additions & 2 deletions package.json
@@ -1,5 +1,5 @@
{
"name": "chatgpt-next-web",
"name": "nextchat",
"private": false,
"license": "mit",
"scripts": {
@@ -66,4 +66,4 @@
"resolutions": {
"lint-staged/yaml": "^2.2.2"
}
}
}
2 changes: 1 addition & 1 deletion scripts/setup.sh
@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
fi

# Clone the repository and install dependencies
git clone https://github.com/Yidadaa/ChatGPT-Next-Web
git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
cd ChatGPT-Next-Web
yarn install

22 changes: 11 additions & 11 deletions src-tauri/Cargo.lock

Some generated files are not rendered by default.

26 changes: 22 additions & 4 deletions src-tauri/Cargo.toml
@@ -1,11 +1,11 @@
[package]
name = "chatgpt-next-web"
name = "nextchat"
version = "0.1.0"
description = "A cross platform app for LLM ChatBot."
authors = ["Yidadaa"]
license = "mit"
repository = ""
default-run = "chatgpt-next-web"
default-run = "nextchat"
edition = "2021"
rust-version = "1.60"

@@ -17,11 +17,29 @@ tauri-build = { version = "1.3.0", features = [] }
[dependencies]
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
tauri = { version = "1.3.0", features = [ "window-set-always-on-top", "process-relaunch", "fs-all", "notification-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] }
tauri = { version = "1.3.0", features = [
"notification-all",
"fs-all",
"clipboard-all",
"dialog-all",
"shell-open",
"updater",
"window-close",
"window-hide",
"window-maximize",
"window-minimize",
"window-set-icon",
"window-set-ignore-cursor-events",
"window-set-resizable",
"window-show",
"window-start-dragging",
"window-unmaximize",
"window-unminimize",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }

[features]
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
# If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
# DO NOT REMOVE!!
custom-protocol = [ "tauri/custom-protocol" ]
custom-protocol = ["tauri/custom-protocol"]
3 changes: 2 additions & 1 deletion src-tauri/tauri.conf.json
@@ -90,12 +90,13 @@
}
},
"security": {
"dangerousUseHttpScheme": true,
"csp": null
},
"updater": {
"active": true,
"endpoints": [
"https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json"
"https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
],
"dialog": true,
"windows": {