Merge pull request #1418 from samchon/feat/llm-of-validate
New function `typia.llm.applicationOfValidate()`.
Showing 512 changed files with 13,164 additions and 92 deletions.
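
For orientation, here is a minimal, hypothetical usage sketch of the new `typia.llm.applicationOfValidate()` function. The `<App, Model>` generic order is assumed to mirror `typia.llm.application<App, Model>()`, and the `BbsArticleService` interface plus the `"chatgpt"` model literal are illustrative placeholders, not part of this commit.

```ts
import typia from "typia";

// Hypothetical service type: any class or interface could be supplied here.
interface BbsArticleService {
  /** Create a new article. */
  create(props: { title: string; body: string }): void;
}

// Compose the function-calling application; every function carries a validate() member.
const app = typia.llm.applicationOfValidate<BbsArticleService, "chatgpt">();

// Validate model-composed arguments before executing the real function.
const func = app.functions.find((f) => f.name === "create")!;
const checked = func.validate({ title: 123 }); // intentionally wrongly typed
if (checked.success === false) console.log(checked.errors); // feed these back to the LLM
```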
@@ -0,0 +1,81 @@
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
import ts from "typescript";

import { ILlmApplicationOfValidate } from "../../schemas/llm/ILlmApplicationOfValidate";
import { Metadata } from "../../schemas/metadata/Metadata";
import { MetadataParameter } from "../../schemas/metadata/MetadataParameter";

import { ITypiaContext } from "../../transformers/ITypiaContext";

import { IValidation } from "../../IValidation";
import { ValidateProgrammer } from "../ValidateProgrammer";
import { LlmApplicationProgrammer } from "./LlmApplicationProgrammer";

export namespace LlmApplicationOfValidateProgrammer {
  export const validate = (model: ILlmSchema.Model) =>
    LlmApplicationProgrammer.validate(model);

  export const write = <Model extends ILlmSchema.Model>(props: {
    context: ITypiaContext;
    modulo: ts.LeftHandSideExpression;
    model: Model;
    metadata: Metadata;
    config?: Partial<ILlmSchema.ModelConfig[Model]>;
  }): ILlmApplicationOfValidate<Model> => {
    const app: ILlmApplication<Model> = LlmApplicationProgrammer.write(props);
    const parameters: Record<string, MetadataParameter> = Object.fromEntries(
      props.metadata.objects[0]!.type.properties.filter(
        (p) =>
          p.key.isSoleLiteral() &&
          p.value.size() === 1 &&
          p.value.nullable === false &&
          p.value.isRequired() === true &&
          p.value.functions.length === 1,
      )
        .filter(
          (p) =>
            p.jsDocTags.find(
              (tag) => tag.name === "hidden" || tag.name === "internal",
            ) === undefined,
        )
        .map((p) => [
          p.key.getSoleLiteral()!,
          p.value.functions[0]!.parameters[0]!,
        ]),
    );
    return {
      ...app,
      functions: app.functions.map((func) => ({
        ...func,
        validate: writeValidator({
          context: props.context,
          modulo: props.modulo,
          parameter: parameters[func.name]!,
        }),
      })),
    };
  };

  const writeValidator = (props: {
    context: ITypiaContext;
    modulo: ts.LeftHandSideExpression;
    parameter: MetadataParameter;
  }): ((props: object) => IValidation<unknown>) => {
    const type = props.parameter.tsType;
    if (type === undefined)
      // unreachable
      throw new Error(
        "Failed to write the LLM application's function validator. Do not call `LlmApplicationOfValidateProgrammer.write()` by yourself; it is invoked only through the `typia.llm.applicationOfValidate()` function.",
      );
    return ValidateProgrammer.write({
      ...props,
      type: props.parameter.tsType!,
      config: {
        equals: false,
      },
      name: undefined,
    }) satisfies ts.CallExpression as any as (
      props: object,
    ) => IValidation<unknown>;
  };
}
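
The programmer above delegates to `LlmApplicationProgrammer.write()` for the base application, then attaches a compiled `validate` member to each function, collecting parameter metadata only from members that are not tagged `@hidden` or `@internal`. A hedged sketch of how that filtering surfaces at the API level; the service and method names are hypothetical, and it assumes the base programmer applies the same visibility rule to `app.functions`.

```ts
import typia from "typia";

// Hypothetical service used only to illustrate the @hidden / @internal filter above.
interface ShoppingService {
  /** Visible to the LLM: receives a compiled parameters validator. */
  order(props: { productId: string; quantity: number }): void;

  /**
   * Skipped by the jsDocTags filter above, so no validator is generated for it.
   *
   * @internal
   */
  flushCache(props: { scope: string }): void;
}

const app = typia.llm.applicationOfValidate<ShoppingService, "chatgpt">();
console.log(app.functions.map((f) => f.name)); // expected: ["order"]
```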
@@ -0,0 +1,55 @@
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";

import { ILlmFunctionOfValidate } from "./ILlmFunctionOfValidate";

/**
 * Application of LLM function calling with validators.
 *
 * `ILlmApplicationOfValidate` is a data structure representing a collection of
 * {@link ILlmFunctionOfValidate LLM function calling schemas}, composed from a native
 * TypeScript class (or interface) type by the `typia.llm.applicationOfValidate<App, Model>()`
 * function.
 *
 * If you pass the {@link ILlmApplicationOfValidate.functions} objects to an LLM provider
 * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
 * proper function and fill its arguments from the conversation (chat messages) with the
 * user (human). This is the concept of LLM function calling.
 *
 * However, LLM function calling sometimes makes a mistake and composes wrongly typed
 * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver the return value of the
 * {@link ILlmFunctionOfValidate.validate} function back to the LLM provider, and it will
 * correct the parameters in the next conversation. The {@link ILlmFunctionOfValidate.validate}
 * function is a validator that reports detailed information about the wrongly typed parameters.
 *
 * By the way, some parameters (or their nested properties) must be composed by a human,
 * not by the LLM. File uploading or sensitive information like a secret key (password)
 * are examples. In that case, you can separate the function parameters into LLM and
 * human sides by configuring the {@link ILlmApplication.IOptions.separate} property.
 * The separated parameters are assigned to the {@link ILlmFunction.separated} property.
 *
 * For reference, when both the LLM and the human have filled in their parameter values,
 * you can merge them by calling the {@link HttpLlm.mergeParameters} function. In other
 * words, if you've configured the {@link ILlmApplication.IOptions.separate} property,
 * you have to merge the separated parameters before executing the function call.
 *
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export interface ILlmApplicationOfValidate<Model extends ILlmSchema.Model>
  extends ILlmApplication<Model> {
  /**
   * List of function metadata.
   *
   * List of function metadata that can be used for the LLM function call.
   *
   * Also, every function has its own parameters validator,
   * {@link ILlmFunctionOfValidate.validate}. If the LLM function calling composes
   * wrongly typed parameters, deliver the validator's return value back to the LLM,
   * and it will correct the parameters in the next conversation.
   */
  functions: ILlmFunctionOfValidate<Model>[];
}
export namespace ILlmApplicationOfValidate {
  export import IOptions = ILlmApplication.IOptions;
}
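
As a rough illustration of the "pass the functions to the LLM provider" step described in the documentation above, the composed functions could be mapped into OpenAI-style tool definitions. The request shape below is an assumption about the current OpenAI Chat Completions API, and the `CalculatorService` type is a placeholder, not part of this commit.

```ts
import typia from "typia";

// Hypothetical service type for illustration.
interface CalculatorService {
  /** Add two numbers. */
  add(props: { x: number; y: number }): number;
}

const app = typia.llm.applicationOfValidate<CalculatorService, "chatgpt">();

// Map each composed function into an OpenAI-style "tool" definition.
const tools = app.functions.map((f) => ({
  type: "function" as const,
  function: {
    name: f.name,
    description: f.description,
    parameters: f.parameters, // JSON-schema-like parameters understood by the model
  },
}));
console.log(tools);
```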
@@ -0,0 +1,39 @@
import { ILlmFunction, ILlmSchema } from "@samchon/openapi";

import { IValidation } from "../../IValidation";

/**
 * LLM function metadata with validator.
 *
 * `ILlmFunctionOfValidate` is an interface representing function metadata
 * used for LLM (Large Language Model) function calling. It is a function
 * structure containing the function {@link name}, {@link parameters} and
 * {@link output return type}.
 *
 * If you provide this `ILlmFunctionOfValidate` data to an LLM provider like "OpenAI",
 * the provider will compose the function arguments by analyzing the conversation
 * with the user. With the LLM-composed arguments, you can execute the function
 * and get the result.
 *
 * However, do not assume that the LLM will always provide correct arguments.
 * Today's LLMs are not perfect and sometimes compose wrongly typed
 * {@link parameters}. In that case, you can correct the parameters by delivering
 * the return value of the {@link validate} function back to the LLM. The
 * {@link validate} function is a validator that reports detailed information
 * about the wrongly typed {@link parameters}.
 *
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export interface ILlmFunctionOfValidate<Model extends ILlmSchema.Model>
  extends ILlmFunction<Model> {
  validate(props: object): IValidation<unknown>;
}
export namespace ILlmFunctionOfValidate {
  export import ISeparated = ILlmFunction.ISeparated;
}
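
A minimal sketch of the validation-feedback loop this interface enables, assuming a hypothetical `ask` callback that obtains model-composed arguments from the provider; the `MailService` type and retry count are illustrative choices, not part of this commit.

```ts
import typia, { IValidation } from "typia";

// Hypothetical service type for illustration.
interface MailService {
  /** Send an e-mail. */
  send(props: { to: string; subject: string; body: string }): void;
}

const app = typia.llm.applicationOfValidate<MailService, "chatgpt">();
const func = app.functions[0]!;

// `ask` stands in for whatever provider call returns the model-composed
// arguments; on retries it receives the previous validation errors.
async function callWithRetry(
  ask: (feedback?: IValidation.IError[]) => Promise<object>,
): Promise<object> {
  let feedback: IValidation.IError[] | undefined;
  for (let i = 0; i < 3; ++i) {
    const args = await ask(feedback); // arguments composed by the LLM
    const checked = func.validate(args); // type-check against the parameters schema
    if (checked.success) return args; // well-typed: safe to execute the function
    feedback = checked.errors; // otherwise report the errors back and retry
  }
  throw new Error("LLM failed to compose well-typed arguments in 3 attempts.");
}
```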