Squashed commit of the following:
commit bfefb99
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 14:12:04 2024 +0800

    chore: update tauri dependencies (ChatGPTNextWeb#4018)

    * feat: bump version

    * feat: bump version

    * chore: update tauri dependencies

commit 47ae874
Author: Anivie Michaelis <88354708+Anivie@users.noreply.github.com>
Date:   Wed Feb 7 13:48:28 2024 +0800

    fix: add support to http scheme. (ChatGPTNextWeb#3985)

    Co-authored-by: fred-bf <157469842+fred-bf@users.noreply.github.com>

commit d74f636
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 13:46:52 2024 +0800

    Fix/gemini app endpoint (ChatGPTNextWeb#4017)

    * fix: support custom api endpoint

    * fix: attach api key to google gemini

commit b8f0822
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 13:40:30 2024 +0800

    fix: support custom api endpoint (ChatGPTNextWeb#4016)

commit 0869455
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 13:38:02 2024 +0800

    feat: bump version (ChatGPTNextWeb#4015)

    * feat: bump version

    * feat: bump version

commit bca7424
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 13:17:11 2024 +0800

    fix: fix gemini issue when using app (ChatGPTNextWeb#4013)

    * chore: update path

    * fix: fix google auth logic

    * fix: not using header authorization for google api

    * chore: revert to allow stream

commit 9d5801f
Author: fred-bf <157469842+fred-bf@users.noreply.github.com>
Date:   Wed Feb 7 10:31:49 2024 +0800

    fix: avoiding not operation for custom models (ChatGPTNextWeb#4010)

commit 462a88a
Author: H0llyW00dzZ <priv8@btz.pm>
Date:   Tue Feb 6 16:20:12 2024 +0700

    Fix [CI/CD] [Vercel] Deploy Preview (ChatGPTNextWeb#4005)

    - [+] feat(.github/workflows/deploy_preview.yml): add 'reopened' event trigger
tzhaowu0 committed Feb 7, 2024
1 parent 52f3dd4 commit 867b4a8
Showing 16 changed files with 662 additions and 153 deletions.
29 changes: 16 additions & 13 deletions app/client/api.ts
@@ -144,31 +144,34 @@ export function getHeaders() {
const headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
"Accept": "application/json",
Accept: "application/json",
};
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini-pro";
const isGoogle = modelConfig.model.startsWith("gemini");
const isAzure = accessStore.provider === ServiceProvider.Azure;
const authHeader = isAzure ? "api-key" : "Authorization";
const apiKey = isGoogle
? accessStore.googleApiKey
: isAzure
? accessStore.azureApiKey
: accessStore.openaiApiKey;

const clientConfig = getClientConfig();
const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
const validString = (x: string) => x && x.length > 0;

// use user's api key first
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers[authHeader] = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
// when using google api in app, not set auth header
if (!(isGoogle && clientConfig?.isApp)) {
// use user's api key first
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers[authHeader] = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
}

return headers;
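The api.ts hunk wraps the existing key logic in one guard: when a Gemini model is used from the desktop app, no Authorization header is set at all, because the key is attached to the URL instead (see google.ts below). A minimal sketch of that rule follows; buildHeadersSketch and its opts parameter are illustrative stand-ins for the store lookups in the real getHeaders(), and the access-code fallback is omitted.

// Sketch only: `opts` stands in for the values read from
// useAccessStore/useChatStore in the real getHeaders().
type HeaderOpts = {
  isGoogle: boolean; // model name starts with "gemini"
  isAzure: boolean;  // Azure selected as service provider
  isApp: boolean;    // running inside the Tauri app
  apiKey: string;    // user-supplied key, may be empty
};

function buildHeadersSketch(opts: HeaderOpts): Record<string, string> {
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    "x-requested-with": "XMLHttpRequest",
    Accept: "application/json",
  };
  // New guard: Google model + app build means no auth header at all.
  if (opts.isGoogle && opts.isApp) return headers;

  const authHeader = opts.isAzure ? "api-key" : "Authorization";
  if (opts.apiKey.length > 0) {
    headers[authHeader] = opts.isAzure
      ? opts.apiKey.trim()
      : `Bearer ${opts.apiKey.trim()}`;
  }
  return headers;
}

// buildHeadersSketch({ isGoogle: true, isApp: true, isAzure: false, apiKey: "k" })
// => only the three content headers, with no Authorization entry.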
45 changes: 25 additions & 20 deletions app/client/platforms/google.ts
@@ -1,15 +1,8 @@
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";
import de from "@/app/locales/de";
import { DEFAULT_API_HOST } from "@/app/constant";
export class GeminiProApi implements LLMApi {
extractMessage(res: any) {
console.log("[Response] gemini-pro response: ", res);
@@ -21,7 +14,7 @@ export class GeminiProApi implements LLMApi {
);
}
async chat(options: ChatOptions): Promise<void> {
const apiClient = this;
// const apiClient = this;
const messages = options.messages.map((v) => ({
role: v.role.replace("assistant", "model").replace("system", "user"),
parts: [{ text: v.content }],
@@ -79,13 +72,27 @@ export class GeminiProApi implements LLMApi {
],
};

console.log("[Request] google payload: ", requestPayload);
const accessStore = useAccessStore.getState();
let baseUrl = accessStore.googleUrl;
const isApp = !!getClientConfig()?.isApp;

const shouldStream = !!options.config.stream;
let shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
const chatPath = this.path(Google.ChatPath);
let chatPath = this.path(Google.ChatPath);

// let baseUrl = accessStore.googleUrl;

if (!baseUrl) {
baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath
: chatPath;
}

if (isApp) {
baseUrl += `?key=${accessStore.googleApiKey}`;
}
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
@@ -101,10 +108,6 @@ if (shouldStream) {
if (shouldStream) {
let responseText = "";
let remainText = "";
let streamChatPath = chatPath.replace(
"generateContent",
"streamGenerateContent",
);
let finished = false;

let existingTexts: string[] = [];
@@ -134,7 +137,11 @@

// start animation
animateResponseText();
fetch(streamChatPath, chatPayload)

fetch(
baseUrl.replace("generateContent", "streamGenerateContent"),
chatPayload,
)
.then((response) => {
const reader = response?.body?.getReader();
const decoder = new TextDecoder();
@@ -185,11 +192,9 @@
console.error("Error:", error);
});
} else {
const res = await fetch(chatPath, chatPayload);
const res = await fetch(baseUrl, chatPayload);
clearTimeout(requestTimeoutId);

const resJson = await res.json();

if (resJson?.promptFeedback?.blockReason) {
// being blocked
options.onError?.(
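Taken together, the google.ts hunks resolve the request URL in three steps: prefer a user-configured googleUrl, otherwise fall back to the proxy route on the new default host in the app (or the relative chat path on the web), then append the API key as a query parameter when running in the app. A rough sketch with the store fields and constants written out as plain arguments; the names here are placeholders, not the real client API.

// Sketch only: googleUrl/googleApiKey mirror accessStore fields and
// defaultApiHost mirrors DEFAULT_API_HOST from app/constant.ts.
function resolveGeminiUrlSketch(
  googleUrl: string,
  googleApiKey: string,
  isApp: boolean,
  chatPath: string, // e.g. this.path(Google.ChatPath) in the real client
  defaultApiHost = "https://api.nextchat.dev",
): string {
  let baseUrl = googleUrl;
  if (!baseUrl) {
    baseUrl = isApp
      ? defaultApiHost + "/api/proxy/google/" + chatPath
      : chatPath;
  }
  if (isApp) {
    // The app skips the auth header (see api.ts above), so the key
    // travels in the query string instead.
    baseUrl += `?key=${googleApiKey}`;
  }
  return baseUrl;
}

// Streaming reuses the same URL with "generateContent" swapped for
// "streamGenerateContent", mirroring the fetch call in the hunk above.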
7 changes: 6 additions & 1 deletion app/client/platforms/openai.ts
@@ -1,3 +1,4 @@
"use client";
import {
ApiPath,
DEFAULT_API_HOST,
@@ -45,7 +46,9 @@ export class ChatGPTApi implements LLMApi {

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
baseUrl = isApp
? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
: ApiPath.OpenAI;
}

if (baseUrl.endsWith("/")) {
@@ -59,6 +62,8 @@
path = makeAzurePath(path, accessStore.azureApiVersion);
}

console.log("[Proxy Endpoint] ", baseUrl, path);

return [baseUrl, path].join("/");
}

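For OpenAI, an app build with no custom endpoint now goes through a /proxy prefix on DEFAULT_API_HOST instead of hitting the host root. A compact sketch of the URL assembly; the ApiPath.OpenAI value is an assumption here, written out as a local constant.

// Sketch only; OPENAI_API_PATH approximates ApiPath.OpenAI and may not
// match the repository's exact value.
function openAIPathSketch(configuredUrl: string, isApp: boolean, path: string): string {
  const DEFAULT_API_HOST = "https://api.nextchat.dev";
  const OPENAI_API_PATH = "/api/openai";

  let baseUrl = configuredUrl;
  if (baseUrl.length === 0) {
    baseUrl = isApp
      ? DEFAULT_API_HOST + "/proxy" + OPENAI_API_PATH
      : OPENAI_API_PATH;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }
  return [baseUrl, path].join("/"); // e.g. joins the base with "v1/chat/completions"
}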
2 changes: 1 addition & 1 deletion app/components/exporter.tsx
@@ -307,7 +307,7 @@ export function PreviewActions(props: {
setShouldExport(false);

var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
if (config.modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
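The exact === "gemini-pro" comparison becomes a prefix check so any Gemini model selects the GeminiPro client; the same edit repeats in home.tsx, model-config.tsx, and chat.ts below. A hypothetical helper that captures the shared intent (the actual change keeps the check inlined at each call site):

// Hypothetical helper only; not part of the actual change set.
import { ClientApi, ModelProvider } from "../client/api";

function clientApiForModel(model: string): ClientApi {
  return model.startsWith("gemini")
    ? new ClientApi(ModelProvider.GeminiPro)
    : new ClientApi(ModelProvider.GPT);
}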
2 changes: 1 addition & 1 deletion app/components/home.tsx
@@ -171,7 +171,7 @@ export function useLoadData() {
const config = useAppConfig();

var api: ClientApi;
if (config.modelConfig.model === "gemini-pro") {
if (config.modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
2 changes: 1 addition & 1 deletion app/components/model-config.tsx
@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
></input>
</ListItem>

{props.modelConfig.model === "gemini-pro" ? null : (
{props.modelConfig.model.startsWith("gemini") ? null : (
<>
<ListItem
title={Locale.Settings.PresencePenalty.Title}
5 changes: 2 additions & 3 deletions app/constant.ts
@@ -8,8 +8,7 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
export const DEFAULT_API_HOST = "https://api.nextchat.dev";
export const OPENAI_BASE_URL = "https://api.openai.com";

export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
@@ -111,7 +110,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4-1106-preview": "2023-04",
"gpt-4-0125-preview": "2023-04",
"gpt-4-vision-preview": "2023-04",
// After improvements,
// After improvements,
// it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
"gemini-pro": "2023-12",
};
4 changes: 3 additions & 1 deletion app/store/access.ts
@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";
let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

const DEFAULT_OPENAI_URL =
getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
getClientConfig()?.buildMode === "export"
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;

const DEFAULT_ACCESS_STATE = {
accessCode: "",
22 changes: 13 additions & 9 deletions app/store/chat.ts
@@ -92,14 +92,18 @@ function countMessages(msgs: ChatMessage[]) {
}

function fillTemplateWith(input: string, modelConfig: ModelConfig) {
const cutoff = KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
const cutoff =
KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
// Find the model in the DEFAULT_MODELS array that matches the modelConfig.model
const modelInfo = DEFAULT_MODELS.find(m => m.name === modelConfig.model);
if (!modelInfo) {
throw new Error(`Model ${modelConfig.model} not found in DEFAULT_MODELS array.`);
const modelInfo = DEFAULT_MODELS.find((m) => m.name === modelConfig.model);

var serviceProvider = "OpenAI";
if (modelInfo) {
// TODO: auto detect the providerName from the modelConfig.model

// Directly use the providerName from the modelInfo
serviceProvider = modelInfo.provider.providerName;
}
// Directly use the providerName from the modelInfo
const serviceProvider = modelInfo.provider.providerName;

const vars = {
ServiceProvider: serviceProvider,
@@ -119,7 +123,7 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
}

Object.entries(vars).forEach(([name, value]) => {
const regex = new RegExp(`{{${name}}}`, 'g');
const regex = new RegExp(`{{${name}}}`, "g");
output = output.replace(regex, value.toString()); // Ensure value is a string
});

@@ -312,7 +316,7 @@ export const useChatStore = createPersistStore(
});

var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
@@ -497,7 +501,7 @@ export const useChatStore = createPersistStore(
const modelConfig = session.mask.modelConfig;

var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
if (modelConfig.model.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
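Besides switching the provider lookup to a tolerant fallback (unknown models now default to "OpenAI" instead of throwing), the chat.ts hunk replaces template variables with a global regex so every occurrence is substituted. A trimmed-down sketch of that substitution step, with vars reduced to a plain string map:

// Sketch only: `vars` is simplified relative to the full set built in
// fillTemplateWith.
function fillTemplateSketch(input: string, vars: Record<string, string>): string {
  let output = input;
  Object.entries(vars).forEach(([name, value]) => {
    const regex = new RegExp(`{{${name}}}`, "g"); // "g" replaces every occurrence
    output = output.replace(regex, value);
  });
  return output;
}

// fillTemplateSketch("{{ServiceProvider}} / {{model}} / {{model}}",
//   { ServiceProvider: "Google", model: "gemini-pro" })
// => "Google / gemini-pro / gemini-pro"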
4 changes: 2 additions & 2 deletions app/utils/cors.ts
@@ -1,8 +1,8 @@
import { getClientConfig } from "../config/client";
import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
import { ApiPath, DEFAULT_API_HOST } from "../constant";

export function corsPath(path: string) {
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

if (!path.startsWith("/")) {
path = "/" + path;
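With DEFAULT_CORS_HOST removed, corsPath builds app-side URLs on the same DEFAULT_API_HOST as everything else. A sketch of the portion visible in the hunk; the final concatenation is an assumption, since the hunk cuts off before the return statement.

// Sketch of the visible part of corsPath; the host value mirrors the
// constant.ts change earlier in this diff, and the return line is assumed.
function corsPathSketch(path: string, isApp: boolean): string {
  const DEFAULT_API_HOST = "https://api.nextchat.dev";
  const baseUrl = isApp ? `${DEFAULT_API_HOST}` : "";
  if (!path.startsWith("/")) {
    path = "/" + path;
  }
  return `${baseUrl}${path}`;
}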
11 changes: 10 additions & 1 deletion next.config.mjs
@@ -64,8 +64,17 @@ if (mode !== "export") {

nextConfig.rewrites = async () => {
const ret = [
// adjust for previous verison directly using "/api/proxy/" as proxy base route
{
source: "/api/proxy/:path*",
source: "/api/proxy/v1/:path*",
destination: "https://api.openai.com/v1/:path*",
},
{
source: "/api/proxy/google/:path*",
destination: "https://generativelanguage.googleapis.com/:path*",
},
{
source: "/api/proxy/openai/:path*",
destination: "https://api.openai.com/:path*",
},
{
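These rewrites only apply when the build mode is not "export". A few illustrative request-to-upstream mappings implied by the new rules; the concrete paths are examples, not part of the change.

// Example mappings only: the left side is what a client would request,
// the right side the upstream target produced by the rewrite rules above.
const rewriteExamples: Array<[string, string]> = [
  ["/api/proxy/v1/chat/completions", // legacy "/api/proxy" base route
   "https://api.openai.com/v1/chat/completions"],
  ["/api/proxy/google/v1beta/models/gemini-pro:generateContent",
   "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent"],
  ["/api/proxy/openai/v1/models",
   "https://api.openai.com/v1/models"],
];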
4 changes: 2 additions & 2 deletions package.json
@@ -1,5 +1,5 @@
{
"name": "chatgpt-next-web",
"name": "nextchat",
"private": false,
"license": "mit",
"scripts": {
@@ -64,4 +64,4 @@
"resolutions": {
"lint-staged/yaml": "^2.2.2"
}
}
}
2 changes: 1 addition & 1 deletion scripts/setup.sh
@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
fi

# Clone the repository and install dependencies
git clone https://github.com/Yidadaa/ChatGPT-Next-Web
git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
cd ChatGPT-Next-Web
yarn install
