Skip to content

Commit

Permalink
♻️ refactor: refactor OpenAIStreamPayload with chat name
Browse files · Browse the repository at this point in the history
  • Branch information
arvinxx committed Oct 17, 2023
1 parent 49349b4 commit a799530
Show file tree
Hide file tree
Showing 8 changed files with 16 additions and 16 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@ import OpenAI from 'openai';

import { createErrorResponse } from '@/app/api/openai/errorResponse';
import { ChatErrorType } from '@/types/fetch';
import { OpenAIStreamPayload } from '@/types/openai';
import { OpenAIChatStreamPayload } from '@/types/openai/chat';

interface CreateChatCompletionOptions {
openai: OpenAI;
payload: OpenAIStreamPayload;
payload: OpenAIChatStreamPayload;
}

export const createChatCompletion = async ({ payload, openai }: CreateChatCompletionOptions) => {
Expand Down
6 changes: 3 additions & 3 deletions src/app/api/openai/chat/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,18 @@ import OpenAI from 'openai';
import { getServerConfig } from '@/config/server';
import { getOpenAIAuthFromRequest } from '@/const/fetch';
import { ChatErrorType, ErrorType } from '@/types/fetch';
import { OpenAIStreamPayload } from '@/types/openai';
import { OpenAIChatStreamPayload } from '@/types/openai/chat';

import { checkAuth } from '../../auth';
import { createAzureOpenai } from '../createAzureOpenai';
import { createChatCompletion } from '../createChatCompletion';
import { createOpenai } from '../createOpenai';
import { createErrorResponse } from '../errorResponse';
import { createChatCompletion } from './createChatCompletion';

export const runtime = 'edge';

export const POST = async (req: Request) => {
const payload = (await req.json()) as OpenAIStreamPayload;
const payload = (await req.json()) as OpenAIChatStreamPayload;

const { apiKey, accessCode, endpoint, useAzure, apiVersion } = getOpenAIAuthFromRequest(req);

Expand Down
8 changes: 4 additions & 4 deletions src/prompts/agent.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { OpenAIStreamPayload } from '@/types/openai';
import { OpenAIChatStreamPayload } from '@/types/openai/chat';

// 自动起名
export const promptSummaryAgentName = (content: string): Partial<OpenAIStreamPayload> => ({
export const promptSummaryAgentName = (content: string): Partial<OpenAIChatStreamPayload> => ({
messages: [
{
content: `你是一名擅长起名的起名大师,你需要将用户的描述总结为 20 个字以内的角色,格式要求如下:
Expand Down Expand Up @@ -32,7 +32,7 @@ export const promptSummaryAgentName = (content: string): Partial<OpenAIStreamPay
});

// 自动挑选 emoji 和背景色
export const promptPickEmoji = (content: string): Partial<OpenAIStreamPayload> => ({
export const promptPickEmoji = (content: string): Partial<OpenAIChatStreamPayload> => ({
messages: [
{
content: '你是一名非常懂设计与时尚的设计师,你需要从用户的描述中匹配一个合适的 emoji。',
Expand Down Expand Up @@ -61,7 +61,7 @@ export const promptPickEmoji = (content: string): Partial<OpenAIStreamPayload> =
],
});

export const promptSummaryDescription = (content: string): Partial<OpenAIStreamPayload> => ({
export const promptSummaryDescription = (content: string): Partial<OpenAIChatStreamPayload> => ({
messages: [
{
content:
Expand Down
4 changes: 2 additions & 2 deletions src/prompts/chat.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { chatHelpers } from '@/store/session/helpers';
import { LanguageModel } from '@/types/llm';
import { OpenAIChatMessage, OpenAIStreamPayload } from '@/types/openai';
import { OpenAIChatMessage, OpenAIChatStreamPayload } from '@/types/openai/chat';

export const promptSummaryTitle = async (
messages: OpenAIChatMessage[],
): Promise<Partial<OpenAIStreamPayload>> => {
): Promise<Partial<OpenAIChatStreamPayload>> => {
const finalMessages: OpenAIChatMessage[] = [
{
content:
Expand Down
4 changes: 2 additions & 2 deletions src/services/chatModel.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { merge } from 'lodash-es';

import { pluginSelectors, usePluginStore } from '@/store/plugin';
import { initialLobeAgentConfig } from '@/store/session/initialState';
import type { ChatCompletionFunctions, OpenAIStreamPayload } from '@/types/openai';
import type { ChatCompletionFunctions, OpenAIChatStreamPayload } from '@/types/openai/chat';

import { createHeaderWithOpenAI } from './_header';
import { OPENAI_URLS } from './_url';
Expand All @@ -15,7 +15,7 @@ interface FetchChatModelOptions {
* 专门用于对话的 fetch
*/
export const fetchChatModel = (
{ plugins: enabledPlugins, ...params }: Partial<OpenAIStreamPayload>,
{ plugins: enabledPlugins, ...params }: Partial<OpenAIChatStreamPayload>,
options?: FetchChatModelOptions,
) => {
const payload = merge(
Expand Down
2 changes: 1 addition & 1 deletion src/store/plugin/selectors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { uniqBy } from 'lodash-es';

import { PLUGIN_SCHEMA_SEPARATOR } from '@/const/plugin';
import { pluginHelpers } from '@/store/plugin/helpers';
import { ChatCompletionFunctions } from '@/types/openai';
import { ChatCompletionFunctions } from '@/types/openai/chat';

import { PluginStoreState } from './initialState';

Expand Down
2 changes: 1 addition & 1 deletion src/store/session/slices/chat/helpers.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { OpenAIChatMessage } from '@/types/openai';
import { OpenAIChatMessage } from '@/types/openai/chat';
import { encodeAsync } from '@/utils/tokenizer';

export const getMessagesTokenCount = async (messages: OpenAIChatMessage[]) =>
Expand Down
2 changes: 1 addition & 1 deletion src/types/openai.ts → src/types/openai/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ export interface OpenAIChatMessage {
/**
* @title OpenAI Stream Payload
*/
export interface OpenAIStreamPayload {
export interface OpenAIChatStreamPayload {
/**
* @title 控制生成文本中的惩罚系数,用于减少重复性
* @default 0
Expand Down

0 comments on commit a799530

Please sign in to comment.