// main.d.ts
// These are ALL the types an LLM File can use.
// It also embeds some types for LLM Bot, where it uses any.
export type TextContent = {
  type: "text";
  text: string;
};
export type ImageContentPart = {
  type: "image_url";
  image_url: {
    url: string;
    detail?: string; // Optional, defaults to 'auto'
  };
};
export type ContentPart = TextContent | ImageContentPart;
export type Message<T = boolean> = {
  role: string;
  content: T extends true ? (string | ContentPart[] | null) : (string | null);
  name?: string;
  tool_calls?: ToolCall[];
  tool_call_id?: string;
};
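// Illustrative sketch (not part of the declarations): how a plain-text message and a
// multimodal message could look with these types. The role, text, and URL values are
// assumptions for demonstration only.
//
//   const textMessage: Message = {
//     role: "user",
//     content: "Describe this image.",
//   };
//
//   const multiModalMessage: Message<true> = {
//     role: "user",
//     content: [
//       { type: "text", text: "What is in this picture?" },
//       { type: "image_url", image_url: { url: "https://example.com/photo.png", detail: "auto" } },
//     ],
//   };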
export type ToolCall = {
  id: string;
  type: "function";
  function: {
    name: string;
    arguments: string; // JSON format arguments
  };
};
export type Tool = {
  type: "function";
  function: {
    description?: string;
    name: string;
    parameters: object; // JSON Schema object
  };
};
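// Illustrative sketch: a Tool definition with a JSON Schema `parameters` object, and the
// ToolCall a model might return for it. The tool name and fields below are assumptions.
//
//   const weatherTool: Tool = {
//     type: "function",
//     function: {
//       name: "get_weather",
//       description: "Get the current weather for a city.",
//       parameters: {
//         type: "object",
//         properties: { city: { type: "string" } },
//         required: ["city"],
//       },
//     },
//   };
//
//   const exampleCall: ToolCall = {
//     id: "call_1",
//     type: "function",
//     function: { name: "get_weather", arguments: '{"city":"Berlin"}' },
//   };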
export type Response = {
  id: string;
  choices: Choice[];
  messages: Message[];
  created: number; // Unix timestamp
  model: string;
  object: "chat.completion";
  error?: {
    code: number;
    message: string;
  }; // Present only when the request failed
};
export type Choice<T extends boolean = false> = {
  finish_reason: string | null; // Depends on the model. Ex: 'stop' | 'length' | 'content_filter' | 'tool_calls' | 'function_call'
  message: Message<T>;
};
export type information = {
  llmFileVersion: string;
  env?: string[];
  functions: boolean;
  functionsData?: Tool[];
  multiModal: boolean;
  callbackSupport: boolean;
  streamingSupport?: boolean;
  id: string;
  name: string;
  description: string;
  highCostLLM: boolean;
};
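// Illustrative sketch: an `information` block describing an LLM File. Every value below
// (id, name, env key, capability flags) is an assumption chosen for demonstration.
//
//   const exampleInformation: information = {
//     llmFileVersion: "1.0",
//     env: ["EXAMPLE_API_KEY"],
//     functions: true,
//     functionsData: [],
//     multiModal: false,
//     callbackSupport: true,
//     streamingSupport: true,
//     id: "example-llm",
//     name: "Example LLM",
//     description: "A hypothetical provider used only to illustrate the types.",
//     highCostLLM: false,
//   };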
export type Requirements = {
  env?: {
    [envName: string]: string;
  };
  images?: string[];
  streaming?: boolean;
};
export type callbackData = {
  toolCalls?: ToolCall[]; // Tool calls the LLM made, if it called any tools
  data: string | null; // Whatever the LLM said (or a chunk of it if streaming is on)
};
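// Illustrative sketch: a callback that consumes callbackData. With streaming enabled, `data`
// presumably arrives in chunks and `complete` signals the final invocation; this handler is
// an assumption about typical usage, not part of the declarations.
//
//   const onChunk = (info: callbackData, complete: boolean): void => {
//     if (info.data !== null) {
//       // Append the chunk (or the full reply when streaming is off) to the output.
//       console.log(info.data);
//     }
//     if (info.toolCalls) {
//       console.log(`LLM requested ${info.toolCalls.length} tool call(s)`);
//     }
//     if (complete) {
//       console.log("Response finished");
//     }
//   };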
export type llmFile = {
  information: information;
  send: (
    prompt: string,
    messages: Message[],
    callback?: ((information: callbackData, complete: boolean) => void) | null,
    requirements?: Requirements,
  ) => Promise<Response>;
};
declare global {
  // deno-lint-ignore no-var
  var availableLLMs: {
    [id: string]: llmFile;
  };
}
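// Illustrative sketch: how an LLM File might be registered on the global `availableLLMs`
// map and how a caller could invoke it. The implementation body, the "example-llm" id, and
// the prompt text are assumptions; only the shapes come from the declarations above.
//
//   globalThis.availableLLMs ??= {};
//   globalThis.availableLLMs["example-llm"] = {
//     information: exampleInformation, // see the sketch above
//     send: async (prompt, messages, callback, requirements) => {
//       // ... call the provider here and forward chunks via `callback` ...
//       return {
//         id: "resp_1",
//         choices: [{ finish_reason: "stop", message: { role: "assistant", content: "Hello!" } }],
//         messages,
//         created: Math.floor(Date.now() / 1000), // Unix timestamp
//         model: "example-model",
//         object: "chat.completion",
//       };
//     },
//   };
//
//   const reply = await globalThis.availableLLMs["example-llm"]
//     .send("Say hello", [], onChunk, { streaming: true });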