feat: o1 models support streaming now.
Emt-lin committed Dec 2, 2024
1 parent c354202 · commit 45e551c
Showing 3 changed files with 15 additions and 18 deletions.
17 changes: 10 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -69,6 +69,7 @@
     "@langchain/core": "^0.3.3",
     "@langchain/google-genai": "^0.1.2",
     "@langchain/groq": "^0.1.2",
+    "@langchain/openai": "^0.3.14",
     "@orama/orama": "^3.0.0-rc-2",
     "@radix-ui/react-dropdown-menu": "^2.1.2",
     "@radix-ui/react-tooltip": "^1.1.3",
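For context, the newly pinned @langchain/openai package exports the ChatOpenAI chat model class that this plugin builds its OpenAI models from. A minimal sketch of streaming from an o1-family model through it; the model name, API-key handling, and token limit below are illustrative assumptions, not code from this commit:

```ts
// Sketch only: assumes @langchain/openai ^0.3.14 and an OPENAI_API_KEY
// environment variable; "o1-mini" is an illustrative model name.
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({
  model: "o1-mini",
  apiKey: process.env.OPENAI_API_KEY,
  streaming: true,           // o1 models can now stream responses
  maxCompletionTokens: 256,  // o1 models take max_completion_tokens rather than max_tokens
  temperature: 1,            // o1 models only accept the default temperature of 1
});

async function main() {
  // Print the reply chunk by chunk as it streams in.
  const stream = await model.stream("Summarize this note in one sentence.");
  for await (const chunk of stream) {
    process.stdout.write(chunk.content as string);
  }
}

main();
```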
15 changes: 4 additions & 11 deletions src/LLMProviders/chatModelManager.ts
@@ -111,7 +111,7 @@ export default class ChatModelManager
         },
         // @ts-ignore
         openAIOrgId: decrypt(params.openAIOrgId),
-        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature, true),
+        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature),
       },
       [ChatModelProviders.ANTHROPIC]: {
         anthropicApiKey: decrypt(customModel.apiKey || params.anthropicApiKey),
@@ -132,7 +132,7 @@
           baseURL: customModel.baseUrl,
           fetch: customModel.enableCors ? safeFetch : undefined,
         },
-        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature, true),
+        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature),
       },
       [ChatModelProviders.COHEREAI]: {
         apiKey: decrypt(customModel.apiKey || params.cohereApiKey),
@@ -197,7 +197,7 @@
           fetch: customModel.enableCors ? safeFetch : undefined,
           dangerouslyAllowBrowser: true,
         },
-        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature, true),
+        ...this.handleOpenAIExtraArgs(isO1Model, params.maxTokens, params.temperature),
       },
     };

@@ -207,22 +207,15 @@
     return { ...baseConfig, ...selectedProviderConfig };
   }

-  private handleOpenAIExtraArgs(
-    isO1Model: boolean,
-    maxTokens: number,
-    temperature: number,
-    streaming: boolean
-  ) {
+  private handleOpenAIExtraArgs(isO1Model: boolean, maxTokens: number, temperature: number) {
     return isO1Model
       ? {
           maxCompletionTokens: maxTokens,
           temperature: 1,
-          streaming: false,
         }
       : {
           maxTokens: maxTokens,
           temperature: temperature,
-          streaming: streaming,
         };
   }

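The net effect of the refactor above: handleOpenAIExtraArgs no longer accepts a streaming flag, and the o1 branch no longer emits streaming: false, so the caller's streaming setting survives for o1 models. A standalone sketch of the new behavior at a call site; everything outside the helper (model name and limits) is an illustrative assumption:

```ts
// Standalone sketch of the post-commit helper plus an illustrative call site.
function handleOpenAIExtraArgs(isO1Model: boolean, maxTokens: number, temperature: number) {
  return isO1Model
    ? { maxCompletionTokens: maxTokens, temperature: 1 } // o1: fixed temperature, no forced streaming: false
    : { maxTokens: maxTokens, temperature: temperature };
}

// Assumed inputs, for illustration only.
const modelName = "o1-mini";
const maxTokens = 256;
const temperature = 0.7;

const config = {
  modelName,
  streaming: true, // no longer overridden to false when the model is o1
  ...handleOpenAIExtraArgs(modelName.startsWith("o1"), maxTokens, temperature),
};
```

Because the spread comes last, any key it returned would still win over the ones written earlier; dropping the streaming key from the helper entirely is what lets the earlier streaming: true stand.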
