// sort-imports-ignore
import '@anthropic-ai/sdk/shims/web';
import Anthropic from '@anthropic-ai/sdk';
import { ClientOptions } from 'openai';
import { LobeRuntimeAI } from '../BaseAI';
import { AgentRuntimeErrorType } from '../error';
import { ChatCompetitionOptions, ChatStreamPayload, ModelProvider } from '../types';
import { AgentRuntimeError } from '../utils/createError';
import { debugStream } from '../utils/debugStream';
import { desensitizeUrl } from '../utils/desensitizeUrl';
import { buildAnthropicMessages, buildAnthropicTools } from '../utils/anthropicHelpers';
import { StreamingResponse } from '../utils/response';
import { AnthropicStream } from '../utils/streams';
import { LOBE_DEFAULT_MODEL_LIST } from '@/config/aiModels';
import type { ChatModelCard } from '@/types/llm';
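
// Minimal shape of a model entry as returned by Anthropic's `/v1/models` endpoint.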
export interface AnthropicModelCard {
display_name: string;
id: string;
}
const DEFAULT_BASE_URL = 'https://api.anthropic.com';
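
/**
 * Anthropic provider runtime. Wraps the official `@anthropic-ai/sdk` client and
 * adapts it to the `LobeRuntimeAI` interface used by the rest of the runtime layer.
 */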
export class LobeAnthropicAI implements LobeRuntimeAI {
private client: Anthropic;
baseURL: string;
apiKey?: string;
constructor({ apiKey, baseURL = DEFAULT_BASE_URL, ...res }: ClientOptions = {}) {
if (!apiKey) throw AgentRuntimeError.createError(AgentRuntimeErrorType.InvalidProviderAPIKey);
this.client = new Anthropic({ apiKey, baseURL, ...res });
this.baseURL = this.client.baseURL;
this.apiKey = apiKey;
}
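
  /**
   * Streams a chat completion from the Anthropic Messages API. The raw SDK stream is
   * converted into a `StreamingResponse` via `AnthropicStream`; known HTTP errors are
   * mapped to typed runtime errors in the catch block below.
   */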
async chat(payload: ChatStreamPayload, options?: ChatCompetitionOptions) {
try {
const anthropicPayload = await this.buildAnthropicPayload(payload);
const response = await this.client.messages.create(
{ ...anthropicPayload, stream: true },
{
signal: options?.signal,
},
);
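      // Tee the stream: one branch feeds the response, the other is only consumed when
      // DEBUG_ANTHROPIC_CHAT_COMPLETION=1 to dump raw chunks for debugging.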
const [prod, debug] = response.tee();
if (process.env.DEBUG_ANTHROPIC_CHAT_COMPLETION === '1') {
debugStream(debug.toReadableStream()).catch(console.error);
}
return StreamingResponse(AnthropicStream(prod, options?.callback), {
headers: options?.headers,
});
} catch (error) {
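      // Mask non-default endpoints so error payloads don't leak private base URLs.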
let desensitizedEndpoint = this.baseURL;
if (this.baseURL !== DEFAULT_BASE_URL) {
desensitizedEndpoint = desensitizeUrl(this.baseURL);
}
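      // Map well-known HTTP statuses onto typed runtime errors
      // (401: invalid API key, 403: location not supported).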
if ('status' in (error as any)) {
switch ((error as Response).status) {
case 401: {
throw AgentRuntimeError.chat({
endpoint: desensitizedEndpoint,
error: error as any,
errorType: AgentRuntimeErrorType.InvalidProviderAPIKey,
provider: ModelProvider.Anthropic,
});
}
case 403: {
throw AgentRuntimeError.chat({
endpoint: desensitizedEndpoint,
error: error as any,
errorType: AgentRuntimeErrorType.LocationNotSupportError,
provider: ModelProvider.Anthropic,
});
}
default: {
break;
}
}
}
throw AgentRuntimeError.chat({
endpoint: desensitizedEndpoint,
error: error as any,
errorType: AgentRuntimeErrorType.ProviderBizError,
provider: ModelProvider.Anthropic,
});
}
}
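
  /**
   * Maps the provider-agnostic `ChatStreamPayload` onto Anthropic's `MessageCreateParams`:
   * the system message is lifted into the `system` field, and the remaining messages and
   * tools are converted with the shared anthropicHelpers.
   */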
private async buildAnthropicPayload(payload: ChatStreamPayload) {
const { messages, model, max_tokens = 4096, temperature, top_p, tools } = payload;
const system_message = messages.find((m) => m.role === 'system');
const user_messages = messages.filter((m) => m.role !== 'system');
return {
max_tokens,
messages: await buildAnthropicMessages(user_messages),
model,
system: system_message?.content as string,
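      // Assumption: upstream temperatures use a 0–2 (OpenAI-style) scale, while
      // Anthropic expects 0–1, hence the halving when a temperature is provided.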
      temperature: temperature !== undefined ? temperature / 2 : undefined,
tools: buildAnthropicTools(tools),
top_p,
} satisfies Anthropic.MessageCreateParams;
}
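
  /**
   * Fetches the provider's model list from `/v1/models` and maps each entry to a
   * `ChatModelCard`. Capability flags (functionCall, vision) are inferred from the
   * model id, and `enabled` falls back to the builtin default model list.
   */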
async models() {
const url = `${this.baseURL}/v1/models`;
const response = await fetch(url, {
headers: {
'anthropic-version': '2023-06-01',
'x-api-key': `${this.apiKey}`,
},
method: 'GET',
});
const json = await response.json();
const modelList: AnthropicModelCard[] = json['data'];
return modelList
.map((model) => {
return {
displayName: model.display_name,
enabled: LOBE_DEFAULT_MODEL_LIST.find((m) => model.id.endsWith(m.id))?.enabled || false,
functionCall: model.id.toLowerCase().includes('claude-3'),
id: model.id,
vision: model.id.toLowerCase().includes('claude-3') && !model.id.toLowerCase().includes('claude-3-5-haiku'),
};
})
.filter(Boolean) as ChatModelCard[];
}
}
export default LobeAnthropicAI;
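
/*
 * Usage sketch — assumes `ChatStreamPayload` accepts `{ content, role }` message
 * objects; the model id below is only an example.
 *
 *   const runtime = new LobeAnthropicAI({ apiKey: process.env.ANTHROPIC_API_KEY });
 *   const response = await runtime.chat({
 *     messages: [{ content: 'Hello, Claude!', role: 'user' }],
 *     model: 'claude-3-5-sonnet-latest',
 *     temperature: 0.6,
 *   });
 *   // `response` is a streaming fetch Response produced by StreamingResponse.
 */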