Skip to content

Commit

Permalink
papayne/guided conversation skill (microsoft#248)
Browse files Browse the repository at this point in the history
- Re-implements the guided conversation library as a skill. WIP: tests,
examples, and the ability to compose in subroutines are coming next.
- Removes RunContext from config of skill library (should only be in
request/response calls to existing assistants).
- Changes Skill dependency management from class-based to instance-based,
which allows us to use multiple instances of a skill, each configured as a
dependency of other assistants. This allows (and requires) a dev to define
their skill dependencies fully and deterministically.

---------

Co-authored-by: Paul Payne <[email protected]>
  • Loading branch information
payneio and Paul Payne authored Nov 18, 2024
1 parent 5b42067 commit 1287049
Show file tree
Hide file tree
Showing 52 changed files with 1,558 additions and 2,829 deletions.
13 changes: 8 additions & 5 deletions assistants/skill-assistant/assistant/assistant_registry.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import asyncio
import logging
from pathlib import Path
from typing import List
from typing import Optional

from openai_client.chat_driver import ChatDriverConfig
from skill_library import Assistant, Skill
Expand Down Expand Up @@ -29,7 +29,7 @@ async def get_or_create_assistant(
assistant_id: str,
event_mapper: SkillEventMapperProtocol,
chat_driver_config: ChatDriverConfig,
skills: List[Skill] = [],
skills: Optional[dict[str, Skill]] = None,
) -> Assistant:
"""
Get or create an assistant for the given conversation context.
Expand All @@ -52,23 +52,26 @@ async def register_assistant(
assistant_id: str,
event_mapper: SkillEventMapperProtocol,
chat_driver_config: ChatDriverConfig,
skills: List[Skill] = [],
skills: dict[str, Skill] | None = None,
) -> Assistant:
"""
Define the skill assistant that you want to have backing this assistant
service. You can configure the assistant instructions and which skills
to include here.
"""

# for skill in skills:
# FIXME: add emit here?

# Create the assistant.
assistant = Assistant(
name="Assistant",
assistant_id=assistant_id,
drive_root=Path(".data") / assistant_id / "assistant",
metadrive_drive_root=Path(".data") / assistant_id / ".assistant",
metadata_drive_root=Path(".data") / assistant_id / ".assistant",
chat_driver_config=chat_driver_config,
skills=skills,
)
assistant.register_skills(skills)

# Assistant event consumer.
async def subscribe() -> None:
Expand Down
106 changes: 56 additions & 50 deletions assistants/skill-assistant/assistant/skill_assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,12 @@

import logging
from pathlib import Path
from typing import Any, Optional

import openai_client
from content_safety.evaluators import CombinedContentSafetyEvaluator
from form_filler_skill import FormFillerSkill
from form_filler_skill.guided_conversation import GuidedConversationSkill
from openai_client.chat_driver import ChatDriverConfig

# from form_filler_skill import FormFillerSkill
Expand All @@ -30,6 +33,7 @@
ContentSafetyEvaluator,
ConversationContext,
)
from skill_library.types import Metadata

from assistant.skill_event_mapper import SkillEventMapper

Expand Down Expand Up @@ -97,9 +101,28 @@ async def content_evaluator_factory(context: ConversationContext) -> ContentSafe
assistant_registry = AssistantRegistry()


# Handle the event triggered when the assistant is added to a conversation.
@assistant.events.conversation.on_created
async def on_conversation_created(conversation_context: ConversationContext) -> None:
    """
    Handle the event triggered when the assistant is added to a conversation.

    Looks up this assistant's configuration and posts its configured welcome
    message into the conversation as a chat message.
    """
    # Fetch the per-assistant configuration to get the welcome text.
    config = await assistant_config.get(conversation_context.assistant)

    # Build the greeting; flag it as non-generated so clients can tell it
    # apart from model output.
    greeting = NewConversationMessage(
        content=config.welcome_message,
        message_type=MessageType.chat,
        metadata={"generated_content": False},
    )
    await conversation_context.send_messages(greeting)


@assistant.events.conversation.message.chat.on_created
async def on_message_created(
context: ConversationContext, event: ConversationEvent, message: ConversationMessage
conversation_context: ConversationContext, event: ConversationEvent, message: ConversationMessage
) -> None:
"""
Handle the event triggered when a new chat message is created in the conversation.
Expand All @@ -115,56 +138,38 @@ async def on_message_created(
"""

# pass the message to the core response logic
await respond_to_conversation(context, event, message)
async with conversation_context.set_status("thinking..."):
config = await assistant_config.get(conversation_context.assistant)
metadata: dict[str, Any] = {"debug": {"content_safety": event.data.get(content_safety.metadata_key, {})}}
await respond_to_conversation(conversation_context, config, message, metadata)


@assistant.events.conversation.message.command.on_created
async def on_command_message_created(
context: ConversationContext, event: ConversationEvent, message: ConversationMessage
) -> None:
"""
Handle the event triggered when a new command message is created in the conversation.
"""
# @assistant.events.conversation.message.command.on_created
# async def on_command_message_created(
# conversation_context: ConversationContext, event: ConversationEvent, message: ConversationMessage
# ) -> None:
# """
# Handle the event triggered when a new command message is created in the conversation.
# """

# pass the message to the core response logic
await respond_to_conversation(context, event, message)


# Handle the event triggered when the assistant is added to a conversation.
@assistant.events.conversation.on_created
async def on_conversation_created(context: ConversationContext) -> None:
"""
Handle the event triggered when the assistant is added to a conversation.
"""

# send a welcome message to the conversation
config = await assistant_config.get(context.assistant)
welcome_message = config.welcome_message
await context.send_messages(
NewConversationMessage(
content=welcome_message,
message_type=MessageType.chat,
metadata={"generated_content": False},
)
)
# # pass the message to the core response logic
# async with conversation_context.set_status("thinking..."):
# config = await assistant_config.get(conversation_context.assistant)
# metadata: dict[str, Any] = {"debug": {"content_safety": event.data.get(content_safety.metadata_key, {})}}
# await respond_to_conversation(conversation_context, config, message, metadata)


# Core response logic for handling messages (chat or command) in the conversation.
async def respond_to_conversation(
conversation_context: ConversationContext,
event: ConversationEvent,
config: AssistantConfigModel,
message: ConversationMessage,
metadata: Optional[Metadata] = None,
) -> None:
"""
Respond to a conversation message.
"""

# Get the assistant configuration.
config = await assistant_config.get(conversation_context.assistant)

# TODO: pass metadata to the assistant for at least adding the content safety metadata to debug.
# metadata = {"debug": {"content_safety": event.data.get(content_safety.metadata_key, {})}}

# Update the participant status to indicate the assistant is thinking.
await conversation_context.update_participant_me(UpdateParticipant(status="thinking..."))

Expand All @@ -174,27 +179,31 @@ async def respond_to_conversation(
# Create and register an assistant if necessary.
if not assistant:
try:
async_client = openai_client.create_client(config.service_config)
language_model = openai_client.create_client(config.service_config)
chat_driver_config = ChatDriverConfig(
openai_client=async_client,
openai_client=language_model,
model=config.chat_driver_config.openai_model,
instructions=config.chat_driver_config.instructions,
# context will be overwritten by the assistant when initialized.
)
assistant = await assistant_registry.register_assistant(
conversation_context.id,
SkillEventMapper(conversation_context),
chat_driver_config,
[
PosixSkill(
{
"posix": PosixSkill(
sandbox_dir=Path(".data") / conversation_context.id,
chat_driver_config=chat_driver_config,
mount_dir="/mnt/data",
),
# FormFillerSkill(
# chat_driver_config=chat_driver_config,
# ),
],
"form_filler": FormFillerSkill(
chat_driver_config=chat_driver_config,
language_model=language_model,
),
"guided_conversation": GuidedConversationSkill(
chat_driver_config=chat_driver_config,
language_model=language_model,
),
},
)

except Exception as e:
Expand All @@ -210,7 +219,7 @@ async def respond_to_conversation(
await conversation_context.update_participant_me(UpdateParticipant(status=None))

try:
await assistant.put_message(message.content)
await assistant.put_message(message.content, metadata)
except Exception as e:
logging.exception("exception in on_message_created")
await conversation_context.send_messages(
Expand All @@ -219,6 +228,3 @@ async def respond_to_conversation(
content=f"Unhandled error: {e}",
)
)
finally:
# update the participant status to indicate the assistant is done thinking
await conversation_context.update_participant_me(UpdateParticipant(status=None))
39 changes: 31 additions & 8 deletions libraries/python/openai-client/openai_client/__init__.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,30 @@
import logging
import logging as _logging # Avoid name conflict with local logging module.

from .client import (
create_client,
)
from .completion import message_content_from_completion, message_from_completion
from .config import (
AzureOpenAIApiKeyAuthConfig,
AzureOpenAIAzureIdentityAuthConfig,
AzureOpenAIServiceConfig,
OpenAIServiceConfig,
ServiceConfig,
)
from .errors import (
CompletionError,
validate_completion,
)
from .logging import (
add_serializable_data,
make_completion_args_serializable,
)
from .messages import (
create_assistant_message,
create_system_message,
create_user_message,
format_with_dict,
format_with_liquid,
truncate_messages_for_logging,
)
from .tokens import (
Expand All @@ -19,19 +33,28 @@
num_tokens_from_tools_and_messages,
)

logger = logging.getLogger(__name__)

logger = _logging.getLogger(__name__)

__all__ = [
"add_serializable_data",
"AzureOpenAIApiKeyAuthConfig",
"AzureOpenAIAzureIdentityAuthConfig",
"AzureOpenAIServiceConfig",
"CompletionError",
"create_client",
"truncate_messages_for_logging",
"create_assistant_message",
"create_system_message",
"create_user_message",
"format_with_dict",
"format_with_liquid",
"make_completion_args_serializable",
"message_content_from_completion",
"message_from_completion",
"num_tokens_from_message",
"num_tokens_from_messages",
"num_tokens_from_tools_and_messages",
"AzureOpenAIApiKeyAuthConfig",
"AzureOpenAIAzureIdentityAuthConfig",
"AzureOpenAIServiceConfig",
"OpenAIServiceConfig",
"ServiceConfig",
"logger",
"truncate_messages_for_logging",
"validate_completion",
]
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from typing import Any, Callable, Union

from events import BaseEvent, ErrorEvent, MessageEvent
from openai import AsyncOpenAI
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai.types.chat import (
ChatCompletionMessageParam,
ChatCompletionSystemMessageParam,
Expand All @@ -21,7 +21,7 @@

@dataclass
class ChatDriverConfig:
openai_client: AsyncOpenAI
openai_client: AsyncOpenAI | AsyncAzureOpenAI
model: str
instructions: str | list[str] = "You are a helpful assistant."
instruction_formatter: MessageFormatter | None = None
Expand Down Expand Up @@ -130,6 +130,7 @@ async def respond(
response_format: Union[ResponseFormat, type[BaseModel]] = TEXT_RESPONSE_FORMAT,
function_choice: list[str] | None = None,
instruction_parameters: dict[str, Any] | None = None,
metadata: dict[str, Any] | None = None,
) -> BaseEvent:
"""
Respond to a user message.
Expand Down Expand Up @@ -165,7 +166,7 @@ async def respond(
await self.add_message(user_message)

# Generate a response.
metadata = {}
metadata = metadata or {}

completion_args = {
"model": self.model,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from context import Context

from .assistant import Assistant
from .routine import FunctionRoutine, InstructionRoutine, ProgramRoutine, RoutineTypes
from .routine import InstructionRoutine, ProgramRoutine, RoutineTypes, StateMachineRoutine
from .skill import EmitterType, Skill

logger = logging.getLogger(__name__)
Expand All @@ -12,7 +12,7 @@
"Assistant",
"Context",
"EmitterType",
"FunctionRoutine",
"StateMachineRoutine",
"InstructionRoutine",
"ProgramRoutine",
"RoutineTypes",
Expand Down
Loading

0 comments on commit 1287049

Please sign in to comment.