diff --git a/package-lock.json b/package-lock.json index 1ce08ee..a5186d2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1211,6 +1211,26 @@ "node": ">= 14" } }, + "node_modules/@octokit/request/node_modules/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==", + "dev": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/@octokit/rest": { "version": "19.0.5", "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.5.tgz", @@ -1698,6 +1718,14 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==", "dev": true }, + "node_modules/@types/node": { + "version": "20.2.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.3.tgz", + "integrity": "sha512-pg9d0yC4rVNWQzX8U7xb4olIOFuuVL9za3bzMT2pu2SU0SNEi66i2qrvhE2qt0HvkhuCaWJu7pLNOt/Pj8BIrw==", + "dev": true, + "optional": true, + "peer": true + }, "node_modules/@types/normalize-package-data": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", @@ -6080,26 +6108,6 @@ "lodash": "^4.17.21" } }, - "node_modules/node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dev": true, - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, 
"node_modules/node-releases": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", @@ -9991,6 +9999,7 @@ "version": "1.4.8", "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", + "deprecated": "Please use @jridgewell/sourcemap-codec instead", "dev": true }, "node_modules/space-separated-tokens": { @@ -11947,6 +11956,17 @@ "is-plain-object": "^5.0.0", "node-fetch": "^2.6.7", "universal-user-agent": "^6.0.0" + }, + "dependencies": { + "node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + } + } } }, "@octokit/request-error": { @@ -12296,6 +12316,14 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==", "dev": true }, + "@types/node": { + "version": "20.2.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.3.tgz", + "integrity": "sha512-pg9d0yC4rVNWQzX8U7xb4olIOFuuVL9za3bzMT2pu2SU0SNEi66i2qrvhE2qt0HvkhuCaWJu7pLNOt/Pj8BIrw==", + "dev": true, + "optional": true, + "peer": true + }, "@types/normalize-package-data": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", @@ -15405,15 +15433,6 @@ "lodash": "^4.17.21" } }, - "node-fetch": { - "version": "2.6.7", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", - "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", - "dev": true, - "requires": { - "whatwg-url": "^5.0.0" - } - }, "node-releases": { "version": "2.0.10", "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", diff --git a/src/lib/openai.ts b/src/lib/openai.ts index 4f327d0..673b1f5 100644 --- a/src/lib/openai.ts +++ b/src/lib/openai.ts @@ -1,4 +1,10 @@ -import { ChatCompletionRequestMessage, Configuration, CreateImageRequestSizeEnum, OpenAIApi } from "openai"; +import { + ChatCompletionRequestMessage, + Configuration, + CreateChatCompletionResponse, CreateCompletionResponse, + CreateImageRequestSizeEnum, + OpenAIApi +} from "openai"; import "@logseq/libs"; import { backOff } from "exponential-backoff"; @@ -41,7 +47,7 @@ const retryOptions = { console.warn("Rate limit exceeded. Retrying..."); return true; } - if (err.response.status >= 500){ + if (err.response.status >= 500) { return true; } @@ -51,17 +57,18 @@ const retryOptions = { export async function whisper(file: File,openAiOptions:OpenAIOptions): Promise { const apiKey = openAiOptions.apiKey; + const baseUrl = openAiOptions.completionEndpoint ? openAiOptions.completionEndpoint : "https://api.openai.com/v1"; const model = 'whisper-1'; - + // Create a FormData object and append the file const formData = new FormData(); formData.append('model', model); formData.append('file', file); - + // Send a request to the OpenAI API using a form post const response = await backOff( - () => fetch('https://api.openai.com/v1/audio/transcriptions', { + () => fetch(baseUrl + '/audio/transcriptions', { method: 'POST', headers: { 'Authorization': `Bearer ${apiKey}`, @@ -87,6 +94,7 @@ export async function dallE( const configuration = new Configuration({ apiKey: options.apiKey, + basePath: options.completionEndpoint }); const openai = new OpenAIApi(configuration); @@ -185,6 +193,178 @@ export async function openAI( } } +export async function openAIWithStream( + input: string, + openAiOptions: OpenAIOptions, + onContent: (content: string) => void, + onStop: () => void +): Promise { + const options = { ...OpenAIDefaults(openAiOptions.apiKey), ...openAiOptions }; + 
const engine = options.completionEngine!; + + try { + if (engine.startsWith("gpt-3.5") || engine.startsWith("gpt-4")) { + const inputMessages: ChatCompletionRequestMessage[] = [{ role: "user", content: input }]; + if (openAiOptions.chatPrompt && openAiOptions.chatPrompt.length > 0) { + inputMessages.unshift({ role: "system", content: openAiOptions.chatPrompt }); + } + const body = { + messages: inputMessages, + temperature: options.temperature, + max_tokens: options.maxTokens, + top_p: 1, + frequency_penalty: 0, + presence_penalty: 0, + model: engine, + stream: true + } + const response = await backOff( + () => + fetch(`${options.completionEndpoint}/chat/completions`, { + method: "POST", + body: JSON.stringify(body), + headers: { + Authorization: `Bearer ${options.apiKey}`, + 'Content-Type': 'application/json', + 'Accept': 'text/event-stream' + } + }).then((response) => { + if (response.ok && response.body) { + const reader = response.body.pipeThrough(new TextDecoderStream()).getReader(); + let result = "" + const readStream = (): any => + reader.read().then(({ + value, + done + }) => { + if (done) { + reader.cancel(); + onStop(); + return Promise.resolve({ choices: [{ message: { content: result } }] }); + } + + const data = getDataFromStreamValue(value); + if (!data || !data[0]) { + return readStream(); + } + + let res = "" + for (let i = 0; i < data.length; i++) { + res += data[i].choices[0]?.delta?.content || "" + } + result += res + onContent(res) + return readStream(); + }); + return readStream(); + } else { + return Promise.reject(response); + } + }), + retryOptions + ); + const choices = (response as CreateChatCompletionResponse)?.choices; + if ( + choices && + choices[0] && + choices[0].message && + choices[0].message.content && + choices[0].message.content.length > 0 + ) { + return trimLeadingWhitespace(choices[0].message.content); + } else { + return null; + } + } else { + const body = { + prompt: input, + temperature: options.temperature, + max_tokens: 
options.maxTokens, + top_p: 1, + frequency_penalty: 0, + presence_penalty: 0, + model: engine, + stream: true + } + const response = await backOff( + () => + fetch(`${options.completionEndpoint}/completions`, { + method: "POST", + body: JSON.stringify(body), + headers: { + Authorization: `Bearer ${options.apiKey}`, + 'Content-Type': 'application/json', + 'Accept': 'text/event-stream' + } + }).then((response) => { + if (response.ok && response.body) { + const reader = response.body.pipeThrough(new TextDecoderStream()).getReader(); + let result = "" + const readStream = (): any => + reader.read().then(({ + value, + done + }) => { + if (done) { + reader.cancel(); + onStop(); + return Promise.resolve({ choices: [{ text: result }]}); + } + + const data = getDataFromStreamValue(value); + if (!data || !data[0]) { + return readStream(); + } + + let res = "" + for (let i = 0; i < data.length; i++) { + res += data[i].choices[0]?.text || "" + } + result += res + onContent(res) + return readStream(); + }); + return readStream(); + } else { + return Promise.reject(response); + } + }), + retryOptions + ); + const choices = (response as CreateCompletionResponse)?.choices; + if ( + choices && + choices[0] && + choices[0].text && + choices[0].text.length > 0 + ) { + return trimLeadingWhitespace(choices[0].text); + } else { + return null; + } + } + } catch (e: any) { + if (e?.response?.data?.error) { + console.error(e?.response?.data?.error); + throw new Error(e?.response?.data?.error?.message); + } else { + throw e; + } + } +} + +function getDataFromStreamValue(value: string) { + const matches = [...value.split("data:")]; + return matches.filter(content => content.trim().length > 0 && !content.trim().includes("[DONE]")) + .map(match =>{ + try{ + return JSON.parse(match) + } catch(e) { + return null + } + }); +} + function trimLeadingWhitespace(s: string): string { return s.replace(/^\s+/, ""); } diff --git a/src/lib/rawCommands.ts b/src/lib/rawCommands.ts index 2a5df37..62a16b7 
100644 --- a/src/lib/rawCommands.ts +++ b/src/lib/rawCommands.ts @@ -1,6 +1,6 @@ import { IHookEvent } from "@logseq/libs/dist/LSPlugin.user"; import { getAudioFile, getPageContentFromBlock, saveDalleImage } from "./logseq"; -import { OpenAIOptions, openAI, dallE, whisper } from "./openai"; +import { OpenAIOptions, dallE, whisper, openAIWithStream } from "./openai"; import { getOpenaiSettings } from "./settings"; function handleOpenAIError(e: any) { @@ -79,17 +79,26 @@ export async function runGptBlock(b: IHookEvent) { } try { - let result = await openAI(currentBlock.content, openAISettings); + let result = ""; + const insertBlock = await logseq.Editor.insertBlock(currentBlock.uuid, result, { + sibling: false, + }); + + if(openAISettings.injectPrefix && result.length == 0) { + result = openAISettings.injectPrefix + result; + } + + await openAIWithStream(currentBlock.content, openAISettings, async (content: string) => { + result += content || ""; + if(null != insertBlock) { + await logseq.Editor.updateBlock(insertBlock.uuid, result); + } + }, () => {}); + if (!result) { - logseq.App.showMsg("No OpenAI results.", "warning"); + logseq.App.showMsg("No OpenAI content" , "warning"); return; } - if (openAISettings.injectPrefix) { - result = openAISettings.injectPrefix + result; - } - await logseq.Editor.insertBlock(currentBlock.uuid, result, { - sibling: false, - }); } catch (e: any) { handleOpenAIError(e); } @@ -119,17 +128,24 @@ export async function runGptPage(b: IHookEvent) { } try { - let result = await openAI(pageContents, openAISettings); + let result = ""; + const insertBlock = await logseq.Editor.appendBlockInPage(page.uuid, result); + if (openAISettings.injectPrefix && result.length == 0) { + result = openAISettings.injectPrefix + result; + } + + await openAIWithStream(pageContents, openAISettings, async (content: string) => { + result += content || ""; + if(null != insertBlock) { + await logseq.Editor.updateBlock(insertBlock.uuid, result); + } + }, () => {}); 
if (!result) { - logseq.App.showMsg("No OpenAI results.", "warning"); + logseq.App.showMsg("No OpenAI content" , "warning"); return; } - if (openAISettings.injectPrefix) { - result = openAISettings.injectPrefix + result; - } - await logseq.Editor.appendBlockInPage(page.uuid, result); } catch (e: any) { handleOpenAIError(e); } diff --git a/src/main.tsx b/src/main.tsx index 381de1a..51685d1 100644 --- a/src/main.tsx +++ b/src/main.tsx @@ -1,6 +1,6 @@ import "./ui/style.css"; import "@logseq/libs"; -import { openAI } from "./lib/openai"; +import { openAIWithStream } from "./lib/openai"; import React, { useState } from "react"; import ReactDOM from "react-dom/client"; import { Command, LogseqAI } from "./ui/LogseqAI"; @@ -179,7 +179,7 @@ const LogseqApp = () => { const allCommands = [...builtInCommands, ...userCommands]; - const handleCommand = async (command: Command): Promise => { + const handleCommand = async (command: Command, onContent: (content: string) => void): Promise => { let inputText; if (appState.selection.type === "singleBlockSelected") { inputText = appState.selection.block.content; @@ -194,7 +194,8 @@ const LogseqApp = () => { if (command.temperature!=null && !Number.isNaN(command.temperature)) { openAISettings.temperature = command.temperature; } - const response = await openAI(command.prompt + inputText, openAISettings); + const response = await openAIWithStream(command.prompt + inputText, openAISettings, onContent, () => { + }); if (response) { return response; } else { diff --git a/src/ui/LogseqAI.tsx b/src/ui/LogseqAI.tsx index f2749da..c6b9e69 100644 --- a/src/ui/LogseqAI.tsx +++ b/src/ui/LogseqAI.tsx @@ -35,7 +35,7 @@ export interface ErrorState { interface LogseqAIProps { commands: Command[]; - handleCommand: (command: Command) => Promise; + handleCommand: (command: Command, onContent: (content:string) => void) => Promise; onInsert: (text: string) => void; onReplace: (text: string) => void; onClose: () => void; @@ -61,8 +61,12 @@ export const 
LogseqAI = ({ setQuery(command.name); setCommandState({ status: "loading" }); try { - const result = await handleCommand(command); - setCommandState({ status: "success", result }); + let result = ""; + await handleCommand(command, (content) => { + result += content || ""; + setCommandState({ status: "success", result }); + }); + setCommandState({ status: "success", result }); } catch (e) { if (e instanceof Error) { setCommandState({ status: "error", error: e });