diff --git a/packages/bot-engine/blocks/integrations/legacy/openai/createChatCompletionOpenAI.ts b/packages/bot-engine/blocks/integrations/legacy/openai/createChatCompletionOpenAI.ts index e908acc8497..5a2ff31ac13 100644 --- a/packages/bot-engine/blocks/integrations/legacy/openai/createChatCompletionOpenAI.ts +++ b/packages/bot-engine/blocks/integrations/legacy/openai/createChatCompletionOpenAI.ts @@ -13,7 +13,6 @@ import { decrypt } from '@typebot.io/lib/api/encryption/decrypt' import { resumeChatCompletion } from './resumeChatCompletion' import { parseChatCompletionMessages } from './parseChatCompletionMessages' import { executeChatCompletionOpenAIRequest } from './executeChatCompletionOpenAIRequest' -import { isPlaneteScale } from '@typebot.io/lib/isPlanetScale' import prisma from '@typebot.io/lib/prisma' import { ExecuteIntegrationResponse } from '../../../../types' import { parseVariableNumber } from '@typebot.io/variables/parseVariableNumber' @@ -23,6 +22,7 @@ import { defaultOpenAIOptions, } from '@typebot.io/schemas/features/blocks/integrations/openai/constants' import { BubbleBlockType } from '@typebot.io/schemas/features/blocks/bubbles/constants' +import { isPlaneteScale } from '@typebot.io/lib/isPlanetScale' export const createChatCompletionOpenAI = async ( state: SessionState, @@ -90,7 +90,8 @@ export const createChatCompletionOpenAI = async ( blockId, assistantMessageVariableName ) && - !process.env.VERCEL_ENV + (!process.env.VERCEL_ENV || + (isPlaneteScale() && credentials && isCredentialsV2(credentials))) ) { return { clientSideActions: [ @@ -101,6 +102,7 @@ export const createChatCompletionOpenAI = async ( content?: string role: (typeof chatCompletionMessageRoles)[number] }[], + runtime: process.env.VERCEL_ENV ? 
'edge' : 'nodejs', }, expectsDedicatedReply: true, }, diff --git a/packages/bot-engine/forge/executeForgedBlock.ts b/packages/bot-engine/forge/executeForgedBlock.ts index 1306a1a3b1a..ca235a1f7fc 100644 --- a/packages/bot-engine/forge/executeForgedBlock.ts +++ b/packages/bot-engine/forge/executeForgedBlock.ts @@ -59,11 +59,8 @@ export const executeForgedBlock = async ( ) && state.isStreamEnabled && !state.whatsApp && - // TODO: Enable once chat api is rolling - isPlaneteScale() && - credentials && - isCredentialsV2(credentials) - // !process.env.VERCEL_ENV + (!process.env.VERCEL_ENV || + (isPlaneteScale() && credentials && isCredentialsV2(credentials))) ) { return { outgoingEdgeId: block.outgoingEdgeId, @@ -72,6 +69,7 @@ export const executeForgedBlock = async ( type: 'stream', expectsDedicatedReply: true, stream: true, + runtime: process.env.VERCEL_ENV ? 'edge' : 'nodejs', }, ], } diff --git a/packages/embeds/js/package.json b/packages/embeds/js/package.json index 42c64df847c..ab522c68c4f 100644 --- a/packages/embeds/js/package.json +++ b/packages/embeds/js/package.json @@ -1,6 +1,6 @@ { "name": "@typebot.io/js", - "version": "0.2.70", + "version": "0.2.71", "description": "Javascript library to display typebots on your website", "type": "module", "main": "dist/index.js", diff --git a/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx b/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx index 8b03a431a94..729ff53ca85 100644 --- a/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx +++ b/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx @@ -154,6 +154,7 @@ export const ConversationContainer = (props: Props) => { const longRequest = setTimeout(() => { setIsSending(true) }, 1000) + autoScrollToBottom() const { data, error } = await continueChatQuery({ apiHost: props.context.apiHost, sessionId: props.initialChatReply.sessionId, @@ -205,6 +206,11 
@@ export const ConversationContainer = (props: Props) => { isNotDefined(action.lastBubbleBlockId) ) await processClientSideActions(actionsBeforeFirstBubble) + if ( + data.clientSideActions.length === 1 && + data.clientSideActions[0].type === 'stream' + ) + return } setChatChunks((displayedChunks) => [ ...displayedChunks, diff --git a/packages/embeds/js/src/components/bubbles/StreamingBubble.tsx b/packages/embeds/js/src/components/bubbles/StreamingBubble.tsx index 69534ee1fc2..383f62987fd 100644 --- a/packages/embeds/js/src/components/bubbles/StreamingBubble.tsx +++ b/packages/embeds/js/src/components/bubbles/StreamingBubble.tsx @@ -1,5 +1,5 @@ import { streamingMessage } from '@/utils/streamingMessageSignal' -import { createEffect, createSignal } from 'solid-js' +import { For, createEffect, createSignal } from 'solid-js' import { marked } from 'marked' import domPurify from 'dompurify' @@ -8,7 +8,7 @@ type Props = { } export const StreamingBubble = (props: Props) => { - const [content, setContent] = createSignal('') + const [content, setContent] = createSignal([]) marked.use({ renderer: { @@ -19,12 +19,16 @@ export const StreamingBubble = (props: Props) => { }) createEffect(() => { - if (streamingMessage()?.id === props.streamingMessageId) - setContent( - domPurify.sanitize(marked.parse(streamingMessage()?.content ?? ''), { - ADD_ATTR: ['target'], - }) - ) + if (streamingMessage()?.id !== props.streamingMessageId) return [] + setContent( + streamingMessage() + ?.content.split('\n\n') + .map((line) => + domPurify.sanitize(marked.parse(line), { + ADD_ATTR: ['target'], + }) + ) ?? 
[] + ) }) return ( @@ -43,8 +47,9 @@ export const StreamingBubble = (props: Props) => { class={ 'flex flex-col overflow-hidden text-fade-in mx-4 my-2 relative text-ellipsis h-full gap-6' } - innerHTML={content()} - /> + > + <For each={content()}>{(line) => <div innerHTML={line} />}</For> + </div> diff --git a/packages/embeds/js/src/features/blocks/integrations/openai/streamChat.ts b/packages/embeds/js/src/features/blocks/integrations/openai/streamChat.ts index 17a3ab2b132..087f96f33fe 100644 --- a/packages/embeds/js/src/features/blocks/integrations/openai/streamChat.ts +++ b/packages/embeds/js/src/features/blocks/integrations/openai/streamChat.ts @@ -7,16 +7,22 @@ let abortController: AbortController | null = null const secondsToWaitBeforeRetries = 3 const maxRetryAttempts = 3 +const edgeRuntimePath = '/api/integrations/openai/streamer' +const nodejsRuntimePath = (sessionId: string) => + `/api/v1/sessions/${sessionId}/streamMessage` + export const streamChat = (context: ClientSideActionContext & { retryAttempt?: number }) => async ({ messages, + runtime, onMessageStream, }: { messages?: { content?: string | undefined role?: 'system' | 'user' | 'assistant' | undefined }[] + runtime: 'edge' | 'nodejs' onMessageStream?: (props: { id: string; message: string }) => void }): Promise<{ message?: string; error?: object }> => { try { @@ -25,9 +31,12 @@ export const streamChat = const apiHost = context.apiHost const res = await fetch( - `${ - isNotEmpty(apiHost) ? apiHost : guessApiHost() - }/api/integrations/openai/streamer`, + (isNotEmpty(apiHost) + ? apiHost + : guessApiHost()) + + (runtime === 'edge' + ? edgeRuntimePath + : nodejsRuntimePath(context.sessionId)), { method: 'POST', headers: { @@ -35,7 +44,7 @@ export const streamChat = }, body: JSON.stringify({ messages, - sessionId: context.sessionId, + sessionId: runtime === 'edge' ? context.sessionId : undefined, }), signal: abortController.signal, } @@ -52,7 +61,7 @@ export const streamChat = return streamChat({ ...context, retryAttempt: (context.retryAttempt ?? 
0) + 1, - })({ messages, onMessageStream }) + })({ messages, onMessageStream, runtime }) } return { error: (await res.json()) || 'Failed to fetch the chat response.', diff --git a/packages/embeds/js/src/utils/executeClientSideActions.ts b/packages/embeds/js/src/utils/executeClientSideActions.ts index 05123373006..f0c7811f8be 100644 --- a/packages/embeds/js/src/utils/executeClientSideActions.ts +++ b/packages/embeds/js/src/utils/executeClientSideActions.ts @@ -54,12 +54,17 @@ export const executeClientSideAction = async ({ 'streamOpenAiChatCompletion' in clientSideAction || 'stream' in clientSideAction ) { + const runtime = + 'streamOpenAiChatCompletion' in clientSideAction + ? clientSideAction.streamOpenAiChatCompletion.runtime + : clientSideAction.runtime const { error, message } = await streamChat(context)({ messages: 'streamOpenAiChatCompletion' in clientSideAction ? clientSideAction.streamOpenAiChatCompletion?.messages : undefined, onMessageStream, + runtime, }) if (error) return { diff --git a/packages/embeds/nextjs/package.json b/packages/embeds/nextjs/package.json index 140cf0c3980..3f5e0412c06 100644 --- a/packages/embeds/nextjs/package.json +++ b/packages/embeds/nextjs/package.json @@ -1,6 +1,6 @@ { "name": "@typebot.io/nextjs", - "version": "0.2.70", + "version": "0.2.71", "description": "Convenient library to display typebots on your Next.js website", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/packages/embeds/react/package.json b/packages/embeds/react/package.json index 795cc14f05b..3e4e6f25146 100644 --- a/packages/embeds/react/package.json +++ b/packages/embeds/react/package.json @@ -1,6 +1,6 @@ { "name": "@typebot.io/react", - "version": "0.2.70", + "version": "0.2.71", "description": "Convenient library to display typebots on your React app", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/packages/schemas/features/chat/clientSideAction.ts b/packages/schemas/features/chat/clientSideAction.ts index 
0964816c4e1..8b48726c76d 100644 --- a/packages/schemas/features/chat/clientSideAction.ts +++ b/packages/schemas/features/chat/clientSideAction.ts @@ -110,6 +110,7 @@ export const clientSideActionSchema = z.discriminatedUnion('type', [ messages: z.array( nativeMessageSchema.pick({ content: true, role: true }) ), + runtime: z.enum(['edge', 'nodejs']), }), }) .merge(clientSideActionBaseSchema) @@ -151,6 +152,7 @@ export const clientSideActionSchema = z.discriminatedUnion('type', [ .object({ type: z.literal('stream'), stream: z.literal(true), + runtime: z.enum(['edge', 'nodejs']), }) .merge(clientSideActionBaseSchema) .openapi({