fix: support custom api endpoint (ChatGPTNextWeb#4016)
fred-bf authored Feb 7, 2024
1 parent 0869455 commit b8f0822
Showing 1 changed file with 17 additions and 16 deletions.
33 changes: 17 additions & 16 deletions app/client/platforms/google.ts
@@ -72,31 +72,34 @@ export class GeminiProApi implements LLMApi {
],
};

const accessStore = useAccessStore.getState();
let baseUrl = accessStore.googleUrl;
const isApp = !!getClientConfig()?.isApp;

const shouldStream = !!options.config.stream;
let shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
const accessStore = useAccessStore.getState();
try {
let chatPath = this.path(Google.ChatPath);

// let baseUrl = accessStore.googleUrl;

chatPath = isApp
? DEFAULT_API_HOST +
"/api/proxy/google/" +
Google.ChatPath +
`?key=${accessStore.googleApiKey}`
: chatPath;
if (!baseUrl) {
baseUrl = isApp
? DEFAULT_API_HOST +
"/api/proxy/google/" +
Google.ChatPath +
`?key=${accessStore.googleApiKey}`
: chatPath;
}

const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
console.log("[Request] google chatPath: ", chatPath, isApp);

// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
@@ -105,10 +108,6 @@ export class GeminiProApi implements LLMApi {
if (shouldStream) {
let responseText = "";
let remainText = "";
let streamChatPath = chatPath.replace(
"generateContent",
"streamGenerateContent",
);
let finished = false;

let existingTexts: string[] = [];
@@ -139,8 +138,10 @@ export class GeminiProApi implements LLMApi {
// start animation
animateResponseText();

console.log("[Proxy Endpoint] ", streamChatPath);
fetch(streamChatPath, chatPayload)
fetch(
baseUrl.replace("generateContent", "streamGenerateContent"),
chatPayload,
)
.then((response) => {
const reader = response?.body?.getReader();
const decoder = new TextDecoder();
@@ -191,7 +192,7 @@ export class GeminiProApi implements LLMApi {
console.error("Error:", error);
});
} else {
const res = await fetch(chatPath, chatPayload);
const res = await fetch(baseUrl, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
if (resJson?.promptFeedback?.blockReason) {
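For readers skimming the diff, here is a minimal standalone sketch of the endpoint-resolution behavior this commit introduces: a user-configured googleUrl is preferred, otherwise the client falls back to the built-in proxy path (app builds) or the relative API route (web builds), and the streaming URL is derived by swapping generateContent for streamGenerateContent. Only that selection logic mirrors the change; the resolveGoogleEndpoint helper, the placeholder constants, and the simplified config shape below are illustrative assumptions, not the project's actual exports.

// Sketch of the endpoint selection added in this commit. Every name and
// value below is illustrative; only the fallback logic mirrors the diff.

const PROXY_HOST = "https://example-proxy.invalid"; // stand-in for DEFAULT_API_HOST
const CHAT_PATH = "v1beta/models/gemini-pro:generateContent"; // stand-in for Google.ChatPath

interface AccessConfig {
  googleUrl: string; // custom endpoint configured by the user, "" when unset
  googleApiKey: string;
}

function resolveGoogleEndpoint(
  access: AccessConfig,
  isApp: boolean,
  stream: boolean,
): string {
  // Prefer the user-supplied custom endpoint.
  let baseUrl = access.googleUrl;

  // Otherwise fall back to the desktop-app proxy or the web client's relative route.
  if (!baseUrl) {
    baseUrl = isApp
      ? `${PROXY_HOST}/api/proxy/google/${CHAT_PATH}?key=${access.googleApiKey}`
      : `/api/google/${CHAT_PATH}`; // stand-in for this.path(Google.ChatPath)
  }

  // Streaming requests hit the streamGenerateContent variant of the same URL.
  return stream
    ? baseUrl.replace("generateContent", "streamGenerateContent")
    : baseUrl;
}

// Example: no custom endpoint, running as the desktop app, streaming enabled.
console.log(
  resolveGoogleEndpoint({ googleUrl: "", googleApiKey: "KEY" }, true, true),
);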
