Separate debug_data from metadata (#205)
The service now stores debug data in a separate table and exposes a separate endpoint to retrieve it. This PR also includes the corresponding updates to the app.
markwaddle authored Nov 5, 2024
1 parent b73b916 commit a79cc7e
Showing 17 changed files with 429 additions and 61 deletions.
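For orientation before the diffs: messages now carry a `has_debug_data` flag, and the debug payload itself lives behind a dedicated endpoint rather than inside `metadata`. A minimal TypeScript sketch of fetching it directly, assuming a hypothetical base URL and bearer token (the app itself goes through its RTK Query service, shown further down):

```typescript
// Sketch only: fetch debug data for a message via the new endpoint.
// `baseUrl` and `token` are placeholders for illustration, not values from this PR.
const baseUrl = 'http://127.0.0.1:3000';

interface MessageDebugResponse {
    message_id: string;
    debug_data: { [key: string]: any };
}

export async function fetchMessageDebugData(
    conversationId: string,
    messageId: string,
    token: string,
): Promise<{ [key: string]: any }> {
    const response = await fetch(
        `${baseUrl}/conversations/${conversationId}/messages/${messageId}/debug_data`,
        { headers: { Authorization: `Bearer ${token}` } },
    );
    if (!response.ok) {
        throw new Error(`debug_data request failed: ${response.status}`);
    }
    const json: MessageDebugResponse = await response.json();
    return json.debug_data;
}
```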
@@ -161,6 +161,7 @@ class ConversationMessage(BaseModel):
content: str
filenames: list[str]
metadata: dict[str, Any]
has_debug_data: bool

@property
def command_name(self) -> str:
@@ -177,6 +178,11 @@ def command_args(self) -> str:
return "".join(self.content.split(" ", 1)[1:])


class ConversationMessageDebug(BaseModel):
message_id: uuid.UUID
debug_data: dict[str, Any]


class ConversationMessageList(BaseModel):
messages: list[ConversationMessage]

@@ -435,6 +441,7 @@ class NewConversationMessage(BaseModel):
content_type: str = "text/plain"
filenames: list[str] | None = None
metadata: dict[str, Any] | None = None
debug_data: dict[str, Any] | None = None


class NewConversationShare(BaseModel):
@@ -157,6 +157,7 @@ async def on_chat_message(
content="Hello, world",
filenames=[],
metadata={},
has_debug_data=False,
).model_dump(mode="json")
},
)
@@ -184,6 +185,7 @@ async def on_chat_message(
content="Hello, world",
filenames=[],
metadata={},
has_debug_data=False,
).model_dump(mode="json")
},
)
@@ -211,6 +213,7 @@ async def on_chat_message(
content="Hello, world",
filenames=[],
metadata={},
has_debug_data=False,
).model_dump(mode="json")
},
)
24 changes: 21 additions & 3 deletions workbench-app/src/components/Conversations/DebugInspector.tsx
@@ -1,10 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.

import { Button, Tooltip, makeStyles } from '@fluentui/react-components';
import { Button, DialogOpenChangeData, DialogOpenChangeEvent, Tooltip, makeStyles } from '@fluentui/react-components';
import { Info16Regular } from '@fluentui/react-icons';
import React from 'react';
import { JSONTree } from 'react-json-tree';
import { DialogControl } from '../App/DialogControl';
import { Loading } from '../App/Loading';
import { ContentRenderer } from './ContentRenderers/ContentRenderer';

const useClasses = makeStyles({
@@ -23,13 +24,27 @@ const useClasses = makeStyles({

interface DebugInspectorProps {
debug?: { [key: string]: any };
loading?: boolean;
trigger?: JSX.Element;
onOpen?: () => void;
onClose?: () => void;
}

export const DebugInspector: React.FC<DebugInspectorProps> = (props) => {
const { debug, trigger } = props;
const { debug, loading, trigger, onOpen, onClose } = props;
const classes = useClasses();

const onOpenChanged = React.useCallback(
(_: DialogOpenChangeEvent, data: DialogOpenChangeData) => {
if (data.open) {
onOpen?.();
return;
}
onClose?.();
},
[onOpen, onClose],
);

if (!debug) {
return null;
}
@@ -48,8 +63,11 @@ export const DebugInspector: React.FC<DebugInspectorProps> = (props) => {
}
classNames={{ dialogSurface: classes.root }}
title="Debug Inspection"
onOpenChange={onOpenChanged}
content={
debug.content ? (
loading ? (
<Loading />
) : debug.content ? (
<ContentRenderer content={debug.content} contentType={debug.contentType} />
) : (
<div className={classes.content}>
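The inspector now supports lazy loading: the caller supplies `loading` and an `onOpen` callback, and the dialog shows a spinner until the debug payload arrives. A usage sketch under stated assumptions (the `loadDebugData` helper and `MessageDebugButton` component are illustrative, not part of this PR):

```tsx
// Sketch only: defer the debug fetch until the DebugInspector dialog is opened.
// `loadDebugData` is a hypothetical loader; the app uses an RTK Query hook instead.
import React from 'react';
import { DebugInspector } from './DebugInspector';

const loadDebugData = async (messageId: string): Promise<{ [key: string]: any }> => {
    // Placeholder for a real API call (see the fetch sketch near the top).
    return { messageId, note: 'debug payload would be fetched here' };
};

export const MessageDebugButton: React.FC<{ messageId: string }> = ({ messageId }) => {
    // Start with a truthy placeholder so the inspector renders its trigger button.
    const [debug, setDebug] = React.useState<{ [key: string]: any } | undefined>({ loading: true });
    const [loading, setLoading] = React.useState(true);

    const handleOpen = React.useCallback(() => {
        void loadDebugData(messageId).then((data) => {
            setDebug(data);
            setLoading(false);
        });
    }, [messageId]);

    return <DebugInspector debug={debug} loading={loading} onOpen={handleOpen} />;
};
```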
@@ -354,6 +354,7 @@ export const InteractInput: React.FC<InteractInputProps> = (props) => {
contentType: 'text/plain',
filenames: [],
metadata,
hasDebugData: false,
},
]),
);
25 changes: 22 additions & 3 deletions workbench-app/src/components/Conversations/InteractMessage.tsx
@@ -36,7 +36,10 @@ import { Utility } from '../../libs/Utility';
import { Conversation } from '../../models/Conversation';
import { ConversationMessage } from '../../models/ConversationMessage';
import { ConversationParticipant } from '../../models/ConversationParticipant';
import { useCreateConversationMessageMutation } from '../../services/workbench';
import {
useCreateConversationMessageMutation,
useGetConversationMessageDebugDataQuery,
} from '../../services/workbench';
import { CopyButton } from '../App/CopyButton';
import { ContentRenderer } from './ContentRenderers/ContentRenderer';
import { ConversationFileIcon } from './ConversationFileIcon';
@@ -153,6 +156,15 @@ export const InteractMessage: React.FC<InteractMessageProps> = (props) => {
const { getAvatarData } = useParticipantUtility();
const [createConversationMessage] = useCreateConversationMessageMutation();
const { isMessageVisibleRef, isMessageVisible, isUnread } = useConversationUtility();
const [skipDebugLoad, setSkipDebugLoad] = React.useState(true);
const {
data: debugData,
isLoading: isLoadingDebugData,
isUninitialized: isUninitializedDebugData,
} = useGetConversationMessageDebugDataQuery(
{ conversationId: conversation.id, messageId: message.id },
{ skip: skipDebugLoad },
);

const isUser = participant.role === 'user';

@@ -228,7 +240,14 @@ export const InteractMessage: React.FC<InteractMessageProps> = (props) => {
() => (
<>
{!readOnly && <MessageLink conversation={conversation} messageId={message.id} />}
<DebugInspector debug={message.metadata?.debug} />
<DebugInspector
debug={message.hasDebugData ? debugData?.debugData || { loading: true } : undefined}
loading={isLoadingDebugData || isUninitializedDebugData}
onOpen={() => {
console.log('OPEN!');
setSkipDebugLoad(false);
}}
/>
<CopyButton data={message.content} tooltip="Copy message" size="small" appearance="transparent" />
{!readOnly && (
<>
@@ -238,7 +257,7 @@ export const InteractMessage: React.FC<InteractMessageProps> = (props) => {
)}
</>
),
[conversation, message, readOnly],
[conversation, debugData?.debugData, isLoadingDebugData, isUninitializedDebugData, message, readOnly],
);

const getRenderedMessage = React.useCallback(() => {
2 changes: 2 additions & 0 deletions workbench-app/src/models/ConversationMessage.ts
@@ -14,6 +14,7 @@ export interface ConversationMessage {
metadata?: {
[key: string]: any;
};
hasDebugData: boolean;
}

export const conversationMessageFromJSON = (json: any): ConversationMessage => {
@@ -29,5 +30,6 @@ export const conversationMessageFromJSON = (json: any): ConversationMessage => {
contentType: json.content_type,
filenames: json.filenames,
metadata: json.metadata,
hasDebugData: json.has_debug_data,
};
};
15 changes: 15 additions & 0 deletions workbench-app/src/models/ConversationMessageDebug.ts
@@ -0,0 +1,15 @@
// Copyright (c) Microsoft. All rights reserved.

export interface ConversationMessageDebug {
id: string;
debugData: {
[key: string]: any;
};
}

export const conversationMessageDebugFromJSON = (json: any): ConversationMessageDebug => {
return {
id: json.id,
debugData: json.debug_data,
};
};
34 changes: 20 additions & 14 deletions workbench-app/src/services/workbench/conversation.ts
@@ -1,5 +1,6 @@
import { Conversation } from '../../models/Conversation';
import { ConversationMessage } from '../../models/ConversationMessage';
import { ConversationMessage, conversationMessageFromJSON } from '../../models/ConversationMessage';
import { ConversationMessageDebug, conversationMessageDebugFromJSON } from '../../models/ConversationMessageDebug';
import { transformResponseToConversationParticipant } from './participant';
import { workbenchApi } from './workbench';

@@ -44,6 +45,14 @@ export const conversationApi = workbenchApi.injectEndpoints({
providesTags: ['Conversation'],
transformResponse: (response: any) => transformResponseToConversationMessages(response),
}),
getConversationMessageDebugData: builder.query<
ConversationMessageDebug,
{ conversationId: string; messageId: string }
>({
query: ({ conversationId, messageId }) =>
`/conversations/${conversationId}/messages/${messageId}/debug_data`,
transformResponse: (response: any) => transformResponseToConversationMessageDebug(response),
}),
createConversationMessage: builder.mutation<
ConversationMessage,
{ conversationId: string } & Partial<ConversationMessage> &
@@ -80,6 +89,7 @@ export const {
useGetAssistantConversationsQuery,
useGetConversationQuery,
useGetConversationMessagesQuery,
useGetConversationMessageDebugDataQuery,
useCreateConversationMessageMutation,
useDeleteConversationMessageMutation,
} = conversationApi;
@@ -118,24 +128,20 @@ const transformResponseToConversationMessages = (response: any): ConversationMes

const transformResponseToMessage = (response: any): ConversationMessage => {
try {
return {
id: response.id,
sender: {
participantId: response.sender.participant_id,
participantRole: response.sender.participant_role,
},
timestamp: response.timestamp,
messageType: response.message_type ?? 'chat',
content: response.content,
contentType: response.content_type,
filenames: response.filenames,
metadata: response.metadata,
};
return conversationMessageFromJSON(response);
} catch (error) {
throw new Error(`Failed to transform message response: ${error}`);
}
};

const transformResponseToConversationMessageDebug = (response: any): ConversationMessageDebug => {
try {
return conversationMessageDebugFromJSON(response);
} catch (error) {
throw new Error(`Failed to transform message debug response: ${error}`);
}
};

const transformMessageForRequest = (message: Partial<ConversationMessage>) => {
const request: Record<string, any> = {
timestamp: message.timestamp,
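The new `useGetConversationMessageDebugDataQuery` hook pairs with RTK Query's `skip` option so the request is only issued once the user opens the inspector, which is the pattern `InteractMessage.tsx` adopts above. A condensed sketch of that gating as a small custom hook (the name `useLazyMessageDebug` and the relative import path are illustrative):

```tsx
// Sketch only: gate the debug query behind a `skip` flag that flips when the inspector opens.
import React from 'react';
import { useGetConversationMessageDebugDataQuery } from '../../services/workbench'; // path depends on the caller's location

export const useLazyMessageDebug = (conversationId: string, messageId: string) => {
    const [skip, setSkip] = React.useState(true);
    const { data, isLoading, isUninitialized } = useGetConversationMessageDebugDataQuery(
        { conversationId, messageId },
        { skip },
    );
    return {
        debugData: data?.debugData,
        // Treat "not started yet" the same as "loading" so the dialog shows a spinner either way.
        loading: isLoading || isUninitialized,
        load: () => setSkip(false),
    };
};
```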
4 changes: 2 additions & 2 deletions workbench-service/Makefile
@@ -6,7 +6,7 @@ WORKBENCH__DB__URL ?= postgresql:///workbench

.PHONY: alembic-upgrade-head
alembic-upgrade-head:
WORKBENCH__DB__URL="$(WORKBENCH__DB__URL)" alembic upgrade head
WORKBENCH__DB__URL="$(WORKBENCH__DB__URL)" uv run alembic upgrade head

.PHONY: alembic-generate-migration
alembic-generate-migration:
@@ -15,7 +15,7 @@ ifndef migration
$(info ex: make alembic-generate-migration migration="neato changes")
$(error "migration" is not set)
else
WORKBENCH__DB__URL="$(WORKBENCH__DB__URL)" alembic revision --autogenerate -m "$(migration)"
WORKBENCH__DB__URL="$(WORKBENCH__DB__URL)" uv run alembic revision --autogenerate -m "$(migration)"
endif

DOCKER_PATH = $(repo_root)
@@ -0,0 +1,99 @@
"""conversationmessagedebug
Revision ID: 5149c7fb5a32
Revises: 039bec8edc33
Create Date: 2024-11-04 20:40:29.252951
"""

from typing import Sequence, Union

import sqlalchemy as sa
import sqlmodel as sm
from alembic import op
from semantic_workbench_service import db

# revision identifiers, used by Alembic.
revision: str = "5149c7fb5a32"
down_revision: Union[str, None] = "039bec8edc33"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
op.create_table(
"conversationmessagedebug",
sa.Column("message_id", sa.Uuid(), nullable=False),
sa.Column("data", sa.JSON(), nullable=False),
sa.ForeignKeyConstraint(
["message_id"],
["conversationmessage.message_id"],
name="fk_conversationmessagedebug_message_id_conversationmessage",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("message_id"),
)

bind = op.get_bind()
max_sequence = bind.execute(sm.select(sm.func.max(db.ConversationMessage.sequence))).scalar()
if max_sequence is not None:
step = 100
for sequence_start in range(1, max_sequence + 1, step):
sequence_end_exclusive = sequence_start + step

results = bind.execute(
sm.select(db.ConversationMessage.message_id, db.ConversationMessage.meta_data).where(
db.ConversationMessage.sequence >= sequence_start,
db.ConversationMessage.sequence < sequence_end_exclusive,
)
).fetchall()

for message_id, meta_data in results:
debug = meta_data.pop("debug", None)
if not debug:
continue

bind.execute(
sm.insert(db.ConversationMessageDebug).values(
message_id=message_id,
data=debug,
)
)

bind.execute(
sm.update(db.ConversationMessage)
.where(db.ConversationMessage.message_id == message_id)
.values(meta_data=meta_data)
)


def downgrade() -> None:
bind = op.get_bind()

max_sequence = bind.execute(sm.select(sm.func.max(db.ConversationMessage.sequence))).scalar()
if max_sequence is not None:
step = 100
for sequence_start in range(1, max_sequence + 1, step):
sequence_end_exclusive = sequence_start + step
results = bind.execute(
sm.select(
db.ConversationMessageDebug.message_id,
db.ConversationMessageDebug.data,
db.ConversationMessage.meta_data,
)
.join(db.ConversationMessage)
.where(
db.ConversationMessage.sequence >= sequence_start,
db.ConversationMessage.sequence < sequence_end_exclusive,
)
).fetchall()

for message_id, debug_data, meta_data in results:
meta_data["debug"] = debug_data
bind.execute(
sm.update(db.ConversationMessage)
.where(db.ConversationMessage.message_id == message_id)
.values(meta_data=meta_data)
)

op.drop_table("conversationmessagedebug")
