Skip to content

Commit

Permalink
feat(streaming): make baseUrl configurable and add stream output (br…
Browse files Browse the repository at this point in the history
  • Loading branch information
pc-dong authored Jun 15, 2023
1 parent ed3c5fd commit 2f1182c
Show file tree
Hide file tree
Showing 5 changed files with 275 additions and 55 deletions.
77 changes: 48 additions & 29 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

190 changes: 185 additions & 5 deletions src/lib/openai.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,10 @@
import { ChatCompletionRequestMessage, Configuration, CreateImageRequestSizeEnum, OpenAIApi } from "openai";
import {
ChatCompletionRequestMessage,
Configuration,
CreateChatCompletionResponse, CreateCompletionResponse,
CreateImageRequestSizeEnum,
OpenAIApi
} from "openai";
import "@logseq/libs";
import { backOff } from "exponential-backoff";

Expand Down Expand Up @@ -41,7 +47,7 @@ const retryOptions = {
console.warn("Rate limit exceeded. Retrying...");
return true;
}
if (err.response.status >= 500){
if (err.response.status >= 500) {
return true;
}

Expand All @@ -51,17 +57,18 @@ const retryOptions = {

export async function whisper(file: File,openAiOptions:OpenAIOptions): Promise<string> {
const apiKey = openAiOptions.apiKey;
const baseUrl = openAiOptions.completionEndpoint ? "https://api.openai.com/v1" : openAiOptions.completionEndpoint;
const model = 'whisper-1';

// Create a FormData object and append the file
const formData = new FormData();
formData.append('model', model);
formData.append('file', file);

// Send a request to the OpenAI API using a form post
const response = await backOff(

() => fetch('https://api.openai.com/v1/audio/transcriptions', {
() => fetch(baseUrl + '/audio/transcriptions', {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
Expand All @@ -87,6 +94,7 @@ export async function dallE(

const configuration = new Configuration({
apiKey: options.apiKey,
basePath: options.completionEndpoint
});

const openai = new OpenAIApi(configuration);
Expand Down Expand Up @@ -185,6 +193,178 @@ export async function openAI(
}
}

export async function openAIWithStream(
input: string,
openAiOptions: OpenAIOptions,
onContent: (content: string) => void,
onStop: () => void
): Promise<string | null> {
const options = { ...OpenAIDefaults(openAiOptions.apiKey), ...openAiOptions };
const engine = options.completionEngine!;

try {
if (engine.startsWith("gpt-3.5") || engine.startsWith("gpt-4")) {
const inputMessages: ChatCompletionRequestMessage[] = [{ role: "user", content: input }];
if (openAiOptions.chatPrompt && openAiOptions.chatPrompt.length > 0) {
inputMessages.unshift({ role: "system", content: openAiOptions.chatPrompt });
}
const body = {
messages: inputMessages,
temperature: options.temperature,
max_tokens: options.maxTokens,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
model: engine,
stream: true
}
const response = await backOff(
() =>
fetch(`${options.completionEndpoint}/chat/completions`, {
method: "POST",
body: JSON.stringify(body),
headers: {
Authorization: `Bearer ${options.apiKey}`,
'Content-Type': 'application/json',
'Accept': 'text/event-stream'
}
}).then((response) => {
if (response.ok && response.body) {
const reader = response.body.pipeThrough(new TextDecoderStream()).getReader();
let result = ""
const readStream = (): any =>
reader.read().then(({
value,
done
}) => {
if (done) {
reader.cancel();
onStop();
return Promise.resolve({ choices: [{ message: { content: result } }] });
}

const data = getDataFromStreamValue(value);
if (!data || !data[0]) {
return readStream();
}

let res = ""
for (let i = 0; i < data.length; i++) {
res += data[i].choices[0]?.delta?.content || ""
}
result += res
onContent(res)
return readStream();
});
return readStream();
} else {
return Promise.reject(response);
}
}),
retryOptions
);
const choices = (response as CreateChatCompletionResponse)?.choices;
if (
choices &&
choices[0] &&
choices[0].message &&
choices[0].message.content &&
choices[0].message.content.length > 0
) {
return trimLeadingWhitespace(choices[0].message.content);
} else {
return null;
}
} else {
const body = {
prompt: input,
temperature: options.temperature,
max_tokens: options.maxTokens,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
model: engine,
stream: true
}
const response = await backOff(
() =>
fetch(`${options.completionEndpoint}/completions`, {
method: "POST",
body: JSON.stringify(body),
headers: {
Authorization: `Bearer ${options.apiKey}`,
'Content-Type': 'application/json',
'Accept': 'text/event-stream'
}
}).then((response) => {
if (response.ok && response.body) {
const reader = response.body.pipeThrough(new TextDecoderStream()).getReader();
let result = ""
const readStream = (): any =>
reader.read().then(({
value,
done
}) => {
if (done) {
reader.cancel();
onStop();
return Promise.resolve({ choices: [{ text: result }]});
}

const data = getDataFromStreamValue(value);
if (!data || !data[0]) {
return readStream();
}

let res = ""
for (let i = 0; i < data.length; i++) {
res += data[i].choices[0]?.text || ""
}
result += res
onContent(res)
return readStream();
});
return readStream();
} else {
return Promise.reject(response);
}
}),
retryOptions
);
const choices = (response as CreateCompletionResponse)?.choices;
if (
choices &&
choices[0] &&
choices[0].text &&
choices[0].text.length > 0
) {
return trimLeadingWhitespace(choices[0].text);
} else {
return null;
}
}
} catch (e: any) {
if (e?.response?.data?.error) {
console.error(e?.response?.data?.error);
throw new Error(e?.response?.data?.error?.message);
} else {
throw e;
}
}
}

/**
 * Splits one decoded SSE text chunk into its parsed JSON payloads.
 *
 * The OpenAI streaming API emits frames of the form `data: {...}` followed
 * by a terminal `data: [DONE]` sentinel. This splits the chunk on the
 * `data:` prefix, drops empty fragments and the `[DONE]` sentinel, and
 * JSON-parses the remainder.
 *
 * Fix: fragments that fail to parse (e.g. a JSON payload split across two
 * network chunks) were previously returned as `null`, and callers indexing
 * `data[i].choices` would then throw. Unparseable fragments are now
 * filtered out, so every element of the returned array is a parsed object.
 *
 * @param value one decoded chunk of the event stream
 * @returns the successfully parsed payload objects, in order
 */
function getDataFromStreamValue(value: string): any[] {
  return value
    .split("data:")
    .filter(content => content.trim().length > 0 && !content.trim().includes("[DONE]"))
    .map(fragment => {
      try {
        return JSON.parse(fragment);
      } catch (e) {
        // Incomplete frame (payload split across chunks) — skip it.
        return null;
      }
    })
    .filter(parsed => parsed !== null);
}

/** Strips any leading whitespace from `s`, leaving the rest intact. */
function trimLeadingWhitespace(s: string): string {
  // trimStart() removes the same whitespace set as /^\s+/ per the spec.
  return s.trimStart();
}
Loading

0 comments on commit 2f1182c

Please sign in to comment.