Skip to content

Commit

Permalink
remember canvas states on reload, experimental support for reasoning …
Browse files Browse the repository at this point in the history
…models, updated state persistence & canvas icon (#239)
  • Loading branch information
bkrabach authored Nov 12, 2024
1 parent 5c2bd3f commit fa2fdda
Show file tree
Hide file tree
Showing 14 changed files with 164 additions and 70 deletions.
72 changes: 55 additions & 17 deletions assistants/explorer-assistant/assistant/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from assistant_extensions.artifacts._model import ArtifactsConfigModel
from assistant_extensions.attachments import AttachmentsExtension
from content_safety.evaluators import CombinedContentSafetyEvaluator
from openai.types.chat import ChatCompletionMessageParam
from openai.types.chat import ChatCompletion, ChatCompletionMessageParam, ParsedChatCompletion
from semantic_workbench_api_model.workbench_model import (
AssistantStateEvent,
ConversationEvent,
Expand Down Expand Up @@ -215,12 +215,19 @@ async def respond_to_conversation(
# add the guardrails prompt to the system message content
system_message_content += f"\n\n{config.guardrails_prompt}"

completion_messages: list[ChatCompletionMessageParam] = [
{
# reasoning models do not support system messages, so set the role to "user" for the system message
completion_messages: list[ChatCompletionMessageParam] = []
if config.request_config.is_reasoning_model:
# if the model is a reasoning model, add the system message as a user message
completion_messages.append({
"role": "user",
"content": system_message_content,
})
else:
completion_messages.append({
"role": "system",
"content": system_message_content,
}
]
})

# generate the attachment messages from the attachment agent
attachment_messages = await attachments_extension.get_completion_messages_for_attachments(
Expand Down Expand Up @@ -269,6 +276,7 @@ async def respond_to_conversation(
# initialize variables for the response content and total tokens used
content = ""
completion_total_tokens = 0
completion: ParsedChatCompletion | ChatCompletion | None = None

# set default response message type
message_type = MessageType.chat
Expand Down Expand Up @@ -312,19 +320,49 @@ async def respond_to_conversation(

else:
# call the OpenAI API to generate a completion
completion = await client.chat.completions.create(
messages=completion_messages,
model=config.request_config.openai_model,
max_tokens=config.request_config.response_tokens,
)

content = completion.choices[0].message.content
try:
if config.request_config.is_reasoning_model:
# for reasoning models, use max_completion_tokens instead of max_tokens
completion = await client.chat.completions.create(
messages=completion_messages,
model=config.request_config.openai_model,
max_completion_tokens=config.request_config.response_tokens,
)
else:
completion = await client.chat.completions.create(
messages=completion_messages,
model=config.request_config.openai_model,
max_tokens=config.request_config.response_tokens,
)

content = completion.choices[0].message.content
except Exception as e:
logger.exception(f"exception occurred calling openai chat completion: {e}")
content = (
"An error occurred while calling the OpenAI API. Is it configured correctly?"
" View the debug inspector for more information."
)
message_type = MessageType.notice
deepmerge.always_merger.merge(
metadata,
{
"debug": {
method_metadata_key: {
"request": {
"model": config.request_config.openai_model,
"messages": completion_messages,
},
"error": str(e),
},
}
},
)

# get the total tokens used for the completion
completion_total_tokens = completion.usage.total_tokens if completion.usage else 0
footer_items = [
_get_token_usage_message(config.request_config.max_tokens, completion_total_tokens),
]
footer_items = []
if completion is not None:
# get the total tokens used for the completion
completion_total_tokens = completion.usage.total_tokens if completion.usage else 0
footer_items.append(_get_token_usage_message(config.request_config.max_tokens, completion_total_tokens))

# add the completion to the metadata for debugging
deepmerge.always_merger.merge(
Expand Down
8 changes: 8 additions & 0 deletions assistants/explorer-assistant/assistant/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,14 @@ class RequestConfig(BaseModel):
Field(title="OpenAI Model", description="The OpenAI model to use for generating responses."),
] = "gpt-4o"

is_reasoning_model: Annotated[
bool,
Field(
title="Is Reasoning Model (o1-preview, o1-mini, etc)",
description="Experimental: enable support for reasoning models such as o1-preview, o1-mini, etc.",
),
] = False


# the workbench app builds dynamic forms based on the configuration model and UI schema
class AssistantConfigModel(BaseModel):
Expand Down
3 changes: 3 additions & 0 deletions libraries/python/openai-client/openai_client/tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@ def num_tokens_from_message(message: ChatCompletionMessageParam, model: str) ->
"gpt-4-32k-0613",
"gpt-4o-mini-2024-07-18",
"gpt-4o-2024-08-06",
# TODO: determine correct handling of reasoning models
"o1-preview",
"o1-mini",
}:
tokens_per_message = 3
tokens_per_name = 1
Expand Down
20 changes: 5 additions & 15 deletions workbench-app/src/components/FrontDoor/Chat/ChatCanvas.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,14 @@ export const ChatCanvas: React.FC<ChatCanvasProps> = (props) => {
preventAssistantModifyOnParticipantIds,
readOnly,
} = props;
const chatCanvasState = useAppSelector((state) => state.chatCanvas);
const { open, mode, selectedAssistantId } = useAppSelector((state) => state.chatCanvas);
const chatCanvasController = useChatCanvasController();
const [firstRun, setFirstRun] = React.useState(true);
const [selectedAssistant, setSelectedAssistant] = React.useState<Assistant>();

// Set the selected assistant based on the chat canvas state
React.useEffect(() => {
if (!chatCanvasState.selectedAssistantId || !chatCanvasState.open || chatCanvasState.mode !== 'assistant') {
if (!selectedAssistantId || !open || mode !== 'assistant') {
// If the assistant id is not set, the canvas is closed, or the mode is not assistant, clear
// the selected assistant and exit early
setSelectedAssistant(undefined);
Expand All @@ -59,9 +59,7 @@ export const ChatCanvas: React.FC<ChatCanvasProps> = (props) => {
}

// Find the assistant that corresponds to the selected assistant id
const assistant = conversationAssistants.find(
(assistant) => assistant.id === chatCanvasState.selectedAssistantId,
);
const assistant = conversationAssistants.find((assistant) => assistant.id === selectedAssistantId);

// If the selected assistant is not found in the conversation, select the first assistant in the conversation
if (!assistant) {
Expand All @@ -75,18 +73,10 @@ export const ChatCanvas: React.FC<ChatCanvasProps> = (props) => {
log(`Setting selected assistant to ${assistant.id}`);
setSelectedAssistant(assistant);
}
}, [
conversationAssistants,
chatCanvasController,
selectedAssistant,
firstRun,
chatCanvasState.selectedAssistantId,
chatCanvasState.open,
chatCanvasState.mode,
]);
}, [conversationAssistants, chatCanvasController, selectedAssistant, firstRun, selectedAssistantId, open, mode]);

// Determine which drawer to open, default to none
const openDrawer = chatCanvasState.open ? chatCanvasState.mode : 'none';
const openDrawer = open ? mode : 'none';
return (
<>
<ConversationDrawer
Expand Down
4 changes: 2 additions & 2 deletions workbench-app/src/components/FrontDoor/Chat/ChatControls.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.

import { Button, makeStyles, tokens, Tooltip } from '@fluentui/react-components';
import { AppsList24Regular, BookInformation24Regular, Dismiss24Regular } from '@fluentui/react-icons';
import { BookInformation24Regular, ChatSettingsRegular, Dismiss24Regular } from '@fluentui/react-icons';
import { EventSourceMessage } from '@microsoft/fetch-event-source';
import React from 'react';
import { useChatCanvasController } from '../../../libs/useChatCanvasController';
Expand Down Expand Up @@ -75,7 +75,7 @@ export const ChatControls: React.FC<ChatControlsProps> = (props) => {
<Tooltip content="Open conversation canvas" relationship="label">
<Button
disabled={conversationActive || chatCanvasController.isTransitioning}
icon={<AppsList24Regular />}
icon={<ChatSettingsRegular />}
onClick={handleActivateConversation}
/>
</Tooltip>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ export const AssistantSelector: React.FC<AssistantSelectorProps> = (props) => {
disabled={disabled}
onOptionSelect={(_event, data) => onChange(data.optionValue as string)}
>
<Option text="Create new assistant" value="new">
Create new assistant
</Option>
<OptionGroup label="Existing Assistants">
{assistants
?.slice()
Expand All @@ -27,11 +30,6 @@ export const AssistantSelector: React.FC<AssistantSelectorProps> = (props) => {
</Option>
))}
</OptionGroup>
<OptionGroup label="New Assistant">
<Option text="Create new assistant" value="new">
Create new assistant
</Option>
</OptionGroup>
</Dropdown>
);
};
13 changes: 8 additions & 5 deletions workbench-app/src/redux/features/app/AppState.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,22 @@
// Copyright (c) Microsoft. All rights reserved.

export interface AppState {
// persisted
devMode: boolean;
errors: {
id: string;
title?: string;
message?: string;
}[];
completedFirstRun: {
app: boolean;
experimental: boolean;
workflow: boolean;
};
hideExperimentalNotice: boolean;
chatWidthPercent: number;
globalContentOpen: boolean;
// transient
isDraggingOverBody?: boolean;
activeConversationId?: string;
errors: {
id: string;
title?: string;
message?: string;
}[];
}
21 changes: 14 additions & 7 deletions workbench-app/src/redux/features/app/appSlice.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,13 @@ import { conversationApi } from '../../../services/workbench';
import { AppState } from './AppState';

const localStorageKey = {
devMode: 'devMode',
chatWidthPercent: 'chatWidthPercent',
completedFirstRunApp: 'completedFirstRun:app',
completedFirstRunExperimental: 'completedFirstRun:experimental',
completedFirstRunWorkflow: 'completedFirstRun:workflow',
hideExperimentalNotice: 'hideExperimentalNotice',
devMode: 'app.dev-mode',
completedFirstRunApp: 'app.completed-first-run.app',
completedFirstRunExperimental: 'app.completed-first-run.experimental',
completedFirstRunWorkflow: 'app.completed-first-run.workflow',
hideExperimentalNotice: 'app.hide-experimental-notice',
chatWidthPercent: 'app.chat-width-percent',
globalContentOpen: 'app.global-content-open',
};

const initialState: AppState = {
Expand All @@ -30,6 +31,7 @@ const initialState: AppState = {
},
hideExperimentalNotice:
AppStorage.getInstance().loadObject<boolean>(localStorageKey.hideExperimentalNotice) ?? false,
globalContentOpen: AppStorage.getInstance().loadObject<boolean>(localStorageKey.globalContentOpen) ?? false,
};

export const appSlice = createSlice({
Expand All @@ -38,7 +40,7 @@ export const appSlice = createSlice({
reducers: {
toggleDevMode: (state: AppState) => {
state.devMode = !state.devMode;
localStorage.setItem(localStorageKey.devMode, state.devMode.toString());
AppStorage.getInstance().saveObject(localStorageKey.devMode, state.devMode);
},
setIsDraggingOverBody: (state: AppState, action: PayloadAction<boolean>) => {
state.isDraggingOverBody = action.payload;
Expand Down Expand Up @@ -108,6 +110,10 @@ export const appSlice = createSlice({
);
}
},
setGlobalContentOpen: (state: AppState, action: PayloadAction<boolean>) => {
AppStorage.getInstance().saveObject(localStorageKey.globalContentOpen, action.payload);
state.globalContentOpen = action.payload;
},
},
});

Expand All @@ -121,6 +127,7 @@ export const {
setCompletedFirstRun,
setHideExperimentalNotice,
setActiveConversationId,
setGlobalContentOpen,
} = appSlice.actions;

export default appSlice.reducer;
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.

export interface ChatCanvasState {
// persisted
open: boolean;
mode: 'conversation' | 'assistant';
selectedAssistantId?: string;
Expand Down
29 changes: 27 additions & 2 deletions workbench-app/src/redux/features/chatCanvas/chatCanvasSlice.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,21 @@
// Copyright (c) Microsoft. All rights reserved.

import { PayloadAction, createSlice } from '@reduxjs/toolkit';
import { AppStorage } from '../../../libs/AppStorage';
import { ChatCanvasState } from './ChatCanvasState';

// Browser-storage keys under which each piece of chat-canvas state is persisted,
// namespaced with a 'chat-canvas.' prefix to avoid collisions with other slices.
const localStorageKey = {
    chatCanvasOpen: 'chat-canvas.open',
    chatCanvasMode: 'chat-canvas.mode',
    chatCanvasSelectedAssistantId: 'chat-canvas.selected-assistant-id',
    chatCanvasSelectedAssistantStateId: 'chat-canvas.selected-assistant-state-id',
};

const initialState: ChatCanvasState = {
open: false,
mode: 'conversation',
open: localStorage.getItem(localStorageKey.chatCanvasOpen) === 'true',
mode: localStorage.getItem(localStorageKey.chatCanvasMode) === 'assistant' ? 'assistant' : 'conversation',
selectedAssistantId: localStorage.getItem(localStorageKey.chatCanvasSelectedAssistantId) ?? undefined,
selectedAssistantStateId: localStorage.getItem(localStorageKey.chatCanvasSelectedAssistantStateId) ?? undefined,
};

export const chatCanvasSlice = createSlice({
Expand All @@ -14,22 +24,37 @@ export const chatCanvasSlice = createSlice({
reducers: {
setChatCanvasOpen: (state: ChatCanvasState, action: PayloadAction<boolean>) => {
state.open = action.payload;
persistState(state);
},
setChatCanvasMode: (state: ChatCanvasState, action: PayloadAction<ChatCanvasState['mode']>) => {
state.mode = action.payload;
persistState(state);
},
setChatCanvasAssistantId: (state: ChatCanvasState, action: PayloadAction<string | undefined>) => {
state.selectedAssistantId = action.payload;
persistState(state);
},
setChatCanvasAssistantStateId: (state: ChatCanvasState, action: PayloadAction<string | undefined>) => {
state.selectedAssistantStateId = action.payload;
persistState(state);
},
setChatCanvasState: (state: ChatCanvasState, action: PayloadAction<ChatCanvasState>) => {
Object.assign(state, action.payload);
persistState(state);
},
},
});

const persistState = (state: ChatCanvasState) => {
AppStorage.getInstance().saveObject(localStorageKey.chatCanvasOpen, state.open);
AppStorage.getInstance().saveObject(localStorageKey.chatCanvasMode, state.mode);
AppStorage.getInstance().saveObject(localStorageKey.chatCanvasSelectedAssistantId, state.selectedAssistantId);
AppStorage.getInstance().saveObject(
localStorageKey.chatCanvasSelectedAssistantStateId,
state.selectedAssistantStateId,
);
};

export const {
setChatCanvasOpen,
setChatCanvasMode,
Expand Down
Loading

0 comments on commit fa2fdda

Please sign in to comment.