Skip to content

Commit

Permalink
chore: bump openai (#1113)
Browse files — browse the repository at this point in the history
  • Loading branch information
himself65 authored Aug 9, 2024
1 parent 1b6263e commit 07a275f
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 21 deletions.
5 changes: 5 additions & 0 deletions .changeset/two-readers-matter.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"llamaindex": patch
---

chore: bump openai
2 changes: 1 addition & 1 deletion packages/llamaindex/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@
"md-utils-ts": "^2.0.0",
"mongodb": "^6.7.0",
"notion-md-crawler": "^1.0.0",
"openai": "^4.52.5",
"openai": "^4.55.3",
"papaparse": "^5.4.1",
"pathe": "^1.1.2",
"pg": "^8.12.0",
Expand Down
27 changes: 19 additions & 8 deletions packages/llamaindex/src/llm/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import type {
ClientOptions as OpenAIClientOptions,
} from "openai";
import { AzureOpenAI, OpenAI as OrigOpenAI } from "openai";
import type { ChatModel } from "openai/resources/chat/chat";

import {
type BaseTool,
Expand Down Expand Up @@ -108,16 +109,24 @@ export const GPT4_MODELS = {
"gpt-4o-2024-05-13": { contextWindow: 128000 },
"gpt-4o-mini": { contextWindow: 128000 },
"gpt-4o-mini-2024-07-18": { contextWindow: 128000 },
"gpt-4o-2024-08-06": { contextWindow: 128000 },
"gpt-4o-2024-09-14": { contextWindow: 128000 },
"gpt-4o-2024-10-14": { contextWindow: 128000 },
"gpt-4-0613": { contextWindow: 128000 },
"gpt-4-turbo-2024-04-09": { contextWindow: 128000 },
"gpt-4-0314": { contextWindow: 128000 },
"gpt-4-32k-0314": { contextWindow: 32768 },
};

// NOTE we don't currently support gpt-3.5-turbo-instruct and don't plan to in the near future
export const GPT35_MODELS = {
"gpt-3.5-turbo": { contextWindow: 4096 },
"gpt-3.5-turbo": { contextWindow: 16385 },
"gpt-3.5-turbo-0613": { contextWindow: 4096 },
"gpt-3.5-turbo-16k": { contextWindow: 16384 },
"gpt-3.5-turbo-16k-0613": { contextWindow: 16384 },
"gpt-3.5-turbo-1106": { contextWindow: 16384 },
"gpt-3.5-turbo-0125": { contextWindow: 16384 },
"gpt-3.5-turbo-16k": { contextWindow: 16385 },
"gpt-3.5-turbo-16k-0613": { contextWindow: 16385 },
"gpt-3.5-turbo-1106": { contextWindow: 16385 },
"gpt-3.5-turbo-0125": { contextWindow: 16385 },
"gpt-3.5-turbo-0301": { contextWindow: 16385 },
};

/**
Expand All @@ -126,7 +135,7 @@ export const GPT35_MODELS = {
export const ALL_AVAILABLE_OPENAI_MODELS = {
...GPT4_MODELS,
...GPT35_MODELS,
};
} satisfies Record<ChatModel, { contextWindow: number }>;

export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
let model: string;
Expand Down Expand Up @@ -157,8 +166,10 @@ export type OpenAIAdditionalChatOptions = Omit<
>;

export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
// Per completion OpenAI params
model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS | string;
model:
| ChatModel
// string & {} is a hack to allow any string, but still give autocomplete
| (string & {});
temperature: number;
topP: number;
maxTokens?: number;
Expand Down
62 changes: 50 additions & 12 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 07a275f

Please sign in to comment.