Merge pull request #1374 from samchon/feat/llm
New function `typia.llm.parameters()`.
samchon authored Nov 23, 2024
2 parents 4cfc667 + 458832e commit c57253c
Showing 1,223 changed files with 170,358 additions and 472,603 deletions.
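
Before the diffs, a minimal usage sketch of the new function, assembled from the JSDoc and signatures added to src/llm.ts in this commit; the `IMember` interface is hypothetical, and the call relies on typia's compile-time transformation:

import { ILlmApplication } from "@samchon/openapi";
import typia from "typia";

// Hypothetical DTO; any object type with only static properties works.
interface IMember {
  email: string;
  name: string;
  age: number;
}

// The generic arguments select the parameters type and the schema model;
// the typia transformer replaces this call with a literal schema at compile time.
const schema: ILlmApplication.ModelSchema["chatgpt"] =
  typia.llm.parameters<IMember, "chatgpt">();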
2 changes: 1 addition & 1 deletion benchmark/package.json
@@ -72,6 +72,6 @@
     "suppress-warnings": "^1.0.2",
     "tstl": "^3.0.0",
     "uuid": "^9.0.1",
-    "typia": "../typia-7.0.0-dev.20241123.tgz"
+    "typia": "../typia-7.0.0-dev.20241124.tgz"
   }
 }
4 changes: 2 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
 {
   "name": "typia",
-  "version": "7.0.0-dev.20241123",
+  "version": "7.0.0-dev.20241124",
   "description": "Superfast runtime validators with only one line",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
@@ -42,7 +42,7 @@
   },
   "homepage": "https://typia.io",
   "dependencies": {
-    "@samchon/openapi": "^2.0.0-dev.20241123-3",
+    "@samchon/openapi": "^2.0.0-dev.20241124",
     "commander": "^10.0.0",
     "comment-json": "^4.2.3",
     "inquirer": "^8.2.5",
4 changes: 2 additions & 2 deletions packages/typescript-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "typescript-json",
-  "version": "7.0.0-dev.20241123",
+  "version": "7.0.0-dev.20241124",
   "description": "Superfast runtime validators with only one line",
   "main": "lib/index.js",
   "typings": "lib/index.d.ts",
@@ -38,7 +38,7 @@
   },
   "homepage": "https://typia.io",
   "dependencies": {
-    "typia": "7.0.0-dev.20241123"
+    "typia": "7.0.0-dev.20241124"
   },
   "peerDependencies": {
     "typescript": ">=4.8.0 <5.7.0"
32 changes: 8 additions & 24 deletions src/internal/_llmApplicationFinalize.ts
@@ -3,36 +3,20 @@ import { HttpLlmConverter } from "@samchon/openapi/lib/converters/HttpLlmConverter";
 
 export const _llmApplicationFinalize = <Model extends ILlmApplication.Model>(
   app: ILlmApplication<Model>,
-  options?: Partial<ILlmApplication.IOptions<Model>>,
+  options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
 ): void => {
-  app.options = (
-    isChatGptOptions(app, options)
-      ? ({
-          separate: options?.separate ?? null,
-          reference: options?.reference ?? false,
-          constraint: options?.constraint ?? false,
-        } satisfies ILlmApplication.IOptions<"chatgpt">)
-      : ({
-          separate: (options?.separate ??
-            null) as ILlmApplication.ICommonOptions<
-            Exclude<Model, "chatgpt">
-          >["separate"],
-          recursive:
-            (options as ILlmApplication.IOptions<"3.0"> | undefined)
-              ?.recursive ?? 3,
-        } satisfies ILlmApplication.ICommonOptions<Exclude<Model, "chatgpt">>)
-  ) as ILlmApplication.IOptions<Model>;
+  app.options = {
+    separate: options?.separate ?? null,
+    constraint: false,
+    recursive: 3,
+    reference: false,
+  };
   if (app.options.separate === null) return;
   for (const func of app.functions)
-    func.separated = HttpLlmConverter.separateParameters({
+    func.separated = HttpLlmConverter.separate({
      model: app.model,
      parameters: func.parameters,
      predicate: app.options
        .separate as ILlmApplication.IOptions<Model>["separate"] as any,
    });
 };
-
-const isChatGptOptions = <Model extends ILlmApplication.Model>(
-  app: ILlmApplication<Model>,
-  _options: unknown,
-): _options is ILlmApplication.IOptions<"chatgpt"> => app.model === "chatgpt";
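
The finalizer now fixes every option except `separate`. Below is a hedged sketch of what a caller can still configure, assuming the predicate receives a model-specific schema node (that is how it is forwarded to HttpLlmConverter.separate() above); the controller class is hypothetical:

import { ILlmApplication } from "@samchon/openapi";
import typia from "typia";

// Hypothetical controller; typia reflects its public methods into LLM functions.
class BbsArticleController {
  public create(props: { title: string; body: string; password: string }): void {
    props; // no-op body, for illustration only
  }
}

// Only `separate` survives as a user option; constraint/recursive/reference
// are fixed to the defaults seen in the finalizer above. The predicate marks
// which parameter schemas are filled by the human instead of the LLM.
const app: ILlmApplication<"chatgpt"> = typia.llm.application<
  BbsArticleController,
  "chatgpt"
>({
  separate: (schema) => schema.description?.includes("@human") ?? false,
});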
76 changes: 73 additions & 3 deletions src/llm.ts
@@ -1,4 +1,4 @@
-import { IChatGptSchema, ILlmApplication } from "@samchon/openapi";
+import { ILlmApplication } from "@samchon/openapi";
 
 /**
  * > You must configure the generic argument `App`.
@@ -84,7 +84,7 @@ export function application<
   App extends object,
   Model extends ILlmApplication.Model,
 >(
-  options?: Partial<Omit<ILlmApplication.IOptions<Model>, "recursive">>,
+  options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
 ): ILlmApplication<Model>;
 
 /**
@@ -94,6 +94,70 @@ export function application(): never {
   halt("application");
 }
 
+/**
+ * > You must configure the generic argument `Parameters`.
+ *
+ * TypeScript parameters to LLM parameters schema.
+ *
+ * Creates an LLM (Large Language Model) parameters schema, type metadata that is
+ * used in [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
+ * from a TypeScript parameters type.
+ *
+ * For reference, an LLM identifies only keyword arguments, not positional
+ * arguments. Therefore, the TypeScript parameters type must be an object type
+ * whose properties are all static; if it has any dynamic property, a
+ * compilation error occurs.
+ *
+ * Also, such a parameters type can be utilized not only for LLM function
+ * calling, but also for LLM structured outputs. LLM structured outputs is a
+ * feature by which the LLM (Large Language Model) generates a structured
+ * output, rather than plain text, by filling the parameters from its
+ * conversation (chat messages) with the user (human).
+ *
+ * @template Parameters Target parameters type
+ * @template Model LLM schema model
+ * @returns LLM parameters schema
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
+ */
+export function parameters(): never;
+
+/**
+ * TypeScript parameters to LLM parameters schema.
+ *
+ * Creates an LLM (Large Language Model) parameters schema, type metadata that is
+ * used in [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
+ * from a TypeScript parameters type.
+ *
+ * For reference, an LLM identifies only keyword arguments, not positional
+ * arguments. Therefore, the TypeScript parameters type must be an object type
+ * whose properties are all static; if it has any dynamic property, a
+ * compilation error occurs.
+ *
+ * Also, such a parameters type can be utilized not only for LLM function
+ * calling, but also for LLM structured outputs. LLM structured outputs is a
+ * feature by which the LLM (Large Language Model) generates a structured
+ * output, rather than plain text, by filling the parameters from its
+ * conversation (chat messages) with the user (human).
+ *
+ * @template Parameters Target parameters type
+ * @template Model LLM schema model
+ * @returns LLM parameters schema
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
+ */
+export function parameters<
+  Parameters extends object,
+  Model extends ILlmApplication.Model,
+>(): ILlmApplication.ModelSchema[Model];
+
+/**
+ * @internal
+ */
+export function parameters(): never {
+  halt("parameters");
+}
+
 /**
  * > You must configure the generic argument `T`.
  *
@@ -129,6 +193,7 @@ export function application(): never {
  * @template Model LLM schema model
  * @returns LLM schema
  * @reference https://platform.openai.com/docs/guides/function-calling
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
  * @author Jeongho Nam - https://github.com/samchon
  */
 export function schema(): never;
@@ -170,7 +235,12 @@ export function schema(): never;
  * @author Jeongho Nam - https://github.com/samchon
  */
 export function schema<T, Model extends ILlmApplication.Model>(
-  ...$defs: Model extends "chatgpt" ? [Record<string, IChatGptSchema>] : []
+  ...$defs: Extract<
+    ILlmApplication.ModelSchema[Model],
+    { $ref: string }
+  > extends never
+    ? []
+    : [Record<string, ILlmApplication.ModelSchema[Model]>]
 ): ILlmApplication.ModelSchema[Model];
 
 /**
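
Per the reworked rest parameter, models whose schema union contains a `{ $ref: string }` variant must now receive a `$defs` dictionary, while models without `$ref` support take no argument at all. A sketch under that reading (the `IMember` type is hypothetical):

import { ILlmApplication } from "@samchon/openapi";
import typia from "typia";

// Hypothetical recursive type; expressible via $ref/$defs on capable models.
interface IMember {
  name: string;
  friends: IMember[];
}

// "chatgpt" schemas include a { $ref: string } variant, so the caller passes
// a $defs record for the transformer to fill with named schemas.
const $defs: Record<string, ILlmApplication.ModelSchema["chatgpt"]> = {};
const chatgpt = typia.llm.schema<IMember, "chatgpt">($defs);

// "gemini" schemas have no $ref variant, so the argument list stays empty
// and recursion is unrolled to a fixed default depth instead.
const gemini = typia.llm.schema<IMember, "gemini">();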
107 changes: 19 additions & 88 deletions src/programmers/llm/LlmApplicationProgrammer.ts
@@ -1,8 +1,5 @@
-import { IChatGptSchema, ILlmApplication, OpenApi } from "@samchon/openapi";
-import { ChatGptConverter } from "@samchon/openapi/lib/converters/ChatGptConverter";
-import { GeminiConverter } from "@samchon/openapi/lib/converters/GeminiConverter";
-import { LlmConverterV3 } from "@samchon/openapi/lib/converters/LlmConverterV3";
-import { LlmConverterV3_1 } from "@samchon/openapi/lib/converters/LlmConverterV3_1";
+import { ILlmApplication, OpenApi } from "@samchon/openapi";
+import { LlmSchemaConverter } from "@samchon/openapi/lib/converters/LlmSchemaConverter";
 import { ILlmFunction } from "@samchon/openapi/lib/structures/ILlmFunction";
 
 import { MetadataFactory } from "../../factories/MetadataFactory";
@@ -120,18 +117,7 @@ export namespace LlmApplicationProgrammer {
           function: func,
         }),
       ),
-      options: (props.model === "chatgpt"
-        ? ({
-            separate: null,
-            reference: false,
-            constraint: false,
-          } satisfies ILlmApplication.IOptions<"chatgpt">)
-        : ({
-            separate: null,
-            recursive: props.model === "chatgpt" ? undefined : (3 as any),
-          } satisfies ILlmApplication.ICommonOptions<
-            Exclude<Model, "chatgpt">
-          >)) as ILlmApplication.IOptions<Model>,
+      options: DEFAULT_CONFIG,
     };
   };
 
@@ -201,33 +187,12 @@ export namespace LlmApplicationProgrammer {
         .map((p) => p.name),
       additionalProperties: false,
     };
-    const parameters: ILlmApplication.ModelParameters[Model] | null = (() => {
-      if (props.model === "chatgpt")
-        return ChatGptConverter.parameters({
-          options: DEFAULT_CHATGPT_OPTION,
-          components: props.components,
-          schema,
-        });
-      else if (props.model === "gemini")
-        return GeminiConverter.parameters({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema,
-        }) as ILlmApplication.ModelParameters[Model] | null;
-      else if (props.model === "3.0")
-        return LlmConverterV3.parameters({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema,
-        }) as ILlmApplication.ModelParameters[Model] | null;
-      else if (props.model === "3.1")
-        return LlmConverterV3_1.parameters({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema,
-        }) as ILlmApplication.ModelParameters[Model] | null;
-      else return null;
-    })();
+    const parameters: ILlmApplication.ModelParameters[Model] | null =
+      LlmSchemaConverter.parameters(props.model)({
+        config: DEFAULT_CONFIG,
+        components: props.components,
+        schema,
+      }) as ILlmApplication.ModelParameters[Model] | null;
     if (parameters === null)
       throw new Error("Failed to write LLM application parameters.");
     return parameters;
@@ -240,56 +205,22 @@
     schema: OpenApi.IJsonSchema | null;
   }): ILlmApplication.ModelSchema[Model] | null => {
     if (props.schema === null) return null;
-    const output: ILlmApplication.ModelSchema[Model] | null = (() => {
-      if (props.model === "chatgpt") {
-        const $defs =
-          (props.parameters as IChatGptSchema.IParameters).$defs ?? {};
-        const output: IChatGptSchema | null = ChatGptConverter.schema({
-          options: DEFAULT_CHATGPT_OPTION,
-          components: props.components,
-          $defs,
-          schema: props.schema,
-        });
-        if (
-          output !== null &&
-          (props.parameters as IChatGptSchema.IParameters).$defs ===
-            undefined &&
-          Object.keys($defs).length !== 0
-        )
-          (props.parameters as IChatGptSchema.IParameters).$defs = $defs;
-        return output;
-      } else if (props.model === "gemini")
-        return GeminiConverter.schema({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema: props.schema,
-        }) as ILlmApplication.ModelSchema[Model] | null;
-      else if (props.model === "3.0")
-        return LlmConverterV3.schema({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema: props.schema,
-        }) as ILlmApplication.ModelSchema[Model] | null;
-      else if (props.model === "3.1")
-        return LlmConverterV3_1.schema({
-          recursive: DEFAULT_V3_OPTIONS.recursive,
-          components: props.components,
-          schema: props.schema,
-        }) as ILlmApplication.ModelSchema[Model] | null;
-      else return null;
-    })();
+    const output: ILlmApplication.ModelSchema[Model] | null =
+      LlmSchemaConverter.schema(props.model)({
+        config: DEFAULT_CONFIG,
+        components: props.components,
+        schema: props.schema,
+        $defs: (props.parameters as any).$defs,
+      }) as ILlmApplication.ModelSchema[Model] | null;
     if (output === null)
       throw new Error("Failed to write LLM application output.");
     return output;
   };
 }
 
-const DEFAULT_CHATGPT_OPTION: ILlmApplication.IChatGptOptions = {
+const DEFAULT_CONFIG = {
   separate: null,
-  reference: false,
   constraint: false,
-};
-const DEFAULT_V3_OPTIONS = {
-  separate: null,
   recursive: 3,
-} satisfies ILlmApplication.ICommonOptions<"3.0">;
+  reference: false,
+};
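
The per-model if/else chains above collapse into `LlmSchemaConverter`, which looks the converter up by model name. A hedged sketch of that dispatch pattern, mirroring the calls in the diff; the `components` and `schema` arguments stand for values the programmer already has at hand:

import { ILlmApplication, OpenApi } from "@samchon/openapi";
import { LlmSchemaConverter } from "@samchon/openapi/lib/converters/LlmSchemaConverter";

// One table lookup replaces four hand-written branches: the model name
// selects the converter, and a single config object serves every model.
const toParameters = <Model extends ILlmApplication.Model>(
  model: Model,
  components: OpenApi.IComponents,
  schema: OpenApi.IJsonSchema,
): ILlmApplication.ModelParameters[Model] | null =>
  LlmSchemaConverter.parameters(model)({
    config: { separate: null, constraint: false, recursive: 3, reference: false },
    components,
    schema,
  }) as ILlmApplication.ModelParameters[Model] | null;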
(Diffs for the remaining changed files are not shown.)
