From 125ffb51fca8f64a6c7a5eb6b7b08bc74415846a Mon Sep 17 00:00:00 2001
From: Mollie Munoz
Date: Tue, 3 Dec 2024 14:14:30 -0800
Subject: [PATCH] Remove GC attachment check material (#276)

Remains in history if needed later at some point. Not to be included in document outline creation today.
---
 .../document/gc_attachment_check_config.py   | 167 ------------------
 .../assistant/agents/document/status.py      |   1 -
 .../assistant/agents/document_agent.py       | 125 -------------
 3 files changed, 293 deletions(-)
 delete mode 100644 assistants/prospector-assistant/assistant/agents/document/gc_attachment_check_config.py

diff --git a/assistants/prospector-assistant/assistant/agents/document/gc_attachment_check_config.py b/assistants/prospector-assistant/assistant/agents/document/gc_attachment_check_config.py
deleted file mode 100644
index 00d6d431..00000000
--- a/assistants/prospector-assistant/assistant/agents/document/gc_attachment_check_config.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import json
-from typing import Annotated, Any, Dict, List, Type
-
-from guided_conversation.utils.resources import ResourceConstraint, ResourceConstraintMode, ResourceConstraintUnit
-from pydantic import BaseModel, Field, create_model
-from semantic_workbench_assistant.config import UISchema
-
-from ... import helpers
-from . import config_defaults as config_defaults
-from .config import GuidedConversationConfigModel, ResourceConstraintConfigModel
-
-
-# Artifact - The artifact is like a form that the agent must complete throughout the conversation.
-# It can also be thought of as a working memory for the agent.
-# We allow any valid Pydantic BaseModel class to be used.
-class ArtifactModel(BaseModel):
-    final_response: str = Field(description="The final response from the agent to the user.")
-    conversation_status: str = Field(description="The status of the conversation.")
-    filenames: str = Field(
-        description="Names of the available files currently uploaded as attachments. May be an empty string if no files are attached."
-    )
-
-
-# Rules - These are the do's and don'ts that the agent should follow during the conversation.
-rules = [
-    "Terminate the conversation immediately if the user asks for harmful or inappropriate content.",
-    "Set the conversation_status to user_completed once you have provided a final_response.",
-]
-
-# Conversation Flow (optional) - This defines in natural language the steps of the conversation.
-conversation_flow = """1. Start by asking if the user has all their documents attached to the conversation that they
-would like to use in drafting their outline. If any filenames are available, list those to the user to demonstrate you
-know what they have already attached. If no filenames are available, let the user know no documents have been attached."
-2. If the user attaches files, be sure to let them know all the filenames you are aware of that have been attached.
-3.You want to reach the point that the user confirms all the docs they want attached have been attached. Once you interpret
-the user's response as a confirmation, then go ahead and provide a final response.
-4. Your final response should share the list of attachments being used and how they will be used. In this scenario they will be used
-to construct a draft outline, which you will be requesting user feedback on. With this final response, the conversation_status must be
-marked as user_completed.
-"""
-
-# Context (optional) - This is any additional information or the circumstances the agent is in that it should be aware of.
-# It can also include the high level goal of the conversation if needed.
-context = """The purpose of gathering these attachments is for the future user need to draft an outline. The purpose of this conversation
-is to make sure the user is aware of what documents they have uploaded as attachments and if they need to upload anymore before
-the user proceeds to drafting the outline."""
-
-
-# Resource Constraints (optional) - This defines the constraints on the conversation such as time or turns.
-# It can also help with pacing the conversation,
-# For example, here we have set an exact time limit of 10 turns which the agent will try to fill.
-resource_constraint = ResourceConstraint(
-    quantity=5,
-    unit=ResourceConstraintUnit.TURNS,
-    mode=ResourceConstraintMode.MAXIMUM,
-)
-
-
-#
-# region Helpers
-#
-
-# take a full json schema and return a pydantic model, including support for
-# nested objects and typed arrays
-
-
-def json_type_to_python_type(json_type: str) -> Type:
-    # Mapping JSON types to Python types
-    type_mapping = {"integer": int, "string": str, "number": float, "boolean": bool, "object": dict, "array": list}
-    return type_mapping.get(json_type, Any)
-
-
-def create_pydantic_model_from_json_schema(schema: Dict[str, Any], model_name="DynamicModel") -> Type[BaseModel]:
-    # Nested function to parse properties from the schema
-    def parse_properties(properties: Dict[str, Any]) -> Dict[str, Any]:
-        fields = {}
-        for prop_name, prop_attrs in properties.items():
-            prop_type = prop_attrs.get("type")
-            description = prop_attrs.get("description", None)
-
-            if prop_type == "object":
-                nested_model = create_pydantic_model_from_json_schema(prop_attrs, model_name=prop_name.capitalize())
-                fields[prop_name] = (nested_model, Field(..., description=description))
-            elif prop_type == "array":
-                items = prop_attrs.get("items", {})
-                if items.get("type") == "object":
-                    nested_model = create_pydantic_model_from_json_schema(items)
-                    fields[prop_name] = (List[nested_model], Field(..., description=description))
-                else:
-                    nested_type = json_type_to_python_type(items.get("type"))
-                    fields[prop_name] = (List[nested_type], Field(..., description=description))
-            else:
-                python_type = json_type_to_python_type(prop_type)
-                fields[prop_name] = (python_type, Field(..., description=description))
-        return fields
-
-    properties = schema.get("properties", {})
-    fields = parse_properties(properties)
-    return create_model(model_name, **fields)
-
-
-# endregion
-
-
-#
-# region Models
-#
-
-
-class GCAttachmentCheckConfigModel(GuidedConversationConfigModel):
-    enabled: Annotated[
-        bool,
-        Field(description=helpers.load_text_include("guided_conversation_agent_enabled.md")),
-        UISchema(enable_markdown_in_description=True),
-    ] = False
-
-    artifact: Annotated[
-        str,
-        Field(
-            title="Artifact",
-            description="The artifact that the agent will manage.",
-        ),
-        UISchema(widget="baseModelEditor"),
-    ] = json.dumps(ArtifactModel.model_json_schema(), indent=2)
-
-    rules: Annotated[
-        list[str],
-        Field(title="Rules", description="Do's and don'ts that the agent should attempt to follow"),
-        UISchema(schema={"items": {"ui:widget": "textarea", "ui:options": {"rows": 2}}}),
-    ] = rules
-
-    conversation_flow: Annotated[
-        str,
-        Field(
-            title="Conversation Flow",
-            description="A loose natural language description of the steps of the conversation",
-        ),
-        UISchema(widget="textarea", schema={"ui:options": {"rows": 10}}, placeholder="[optional]"),
-    ] = conversation_flow.strip()
-
-    context: Annotated[
-        str,
-        Field(
title="Context", - description="General background context for the conversation.", - ), - UISchema(widget="textarea", placeholder="[optional]"), - ] = context.strip() - - resource_constraint: Annotated[ - ResourceConstraintConfigModel, - Field( - title="Resource Constraint", - ), - UISchema(schema={"quantity": {"ui:widget": "updown"}}), - ] = ResourceConstraintConfigModel( - unit=resource_constraint.unit, - quantity=resource_constraint.quantity, - mode=resource_constraint.mode, - ) - - def get_artifact_model(self) -> Type[BaseModel]: - schema = json.loads(self.artifact) - return create_pydantic_model_from_json_schema(schema) - - -# endregion diff --git a/assistants/prospector-assistant/assistant/agents/document/status.py b/assistants/prospector-assistant/assistant/agents/document/status.py index 734abc24..b033ae0d 100644 --- a/assistants/prospector-assistant/assistant/agents/document/status.py +++ b/assistants/prospector-assistant/assistant/agents/document/status.py @@ -11,7 +11,6 @@ class Status(StrEnum): class StepName(StrEnum): UNDEFINED = "undefined" - DO_GC_ATTACHMENT_CHECK = "step_gc_attachment_check" DO_DRAFT_OUTLINE = "step_draft_outline" DO_GC_GET_OUTLINE_FEEDBACK = "step_gc_get_outline_feedback" DO_FINISH = "step_finish" diff --git a/assistants/prospector-assistant/assistant/agents/document_agent.py b/assistants/prospector-assistant/assistant/agents/document_agent.py index 912f7a6e..e074155f 100644 --- a/assistants/prospector-assistant/assistant/agents/document_agent.py +++ b/assistants/prospector-assistant/assistant/agents/document_agent.py @@ -23,7 +23,6 @@ from ..config import AssistantConfigModel from .document.config import GuidedConversationConfigModel -from .document.gc_attachment_check_config import GCAttachmentCheckConfigModel from .document.gc_draft_outline_feedback_config import GCDraftOutlineFeedbackConfigModel from .document.guided_conversation import GuidedConversation from .document.status import Status, StepName @@ -556,7 +555,6 @@ async def _mode_draft_outline( if mode_status is Status.INITIATED: self._state.mode.set_step_order( [ - {"step_name": StepName.DO_GC_ATTACHMENT_CHECK, "run_count": 0}, {"step_name": StepName.DO_DRAFT_OUTLINE, "run_count": 0}, {"step_name": StepName.DO_GC_GET_OUTLINE_FEEDBACK, "run_count": 0}, {"step_name": StepName.DO_FINISH, "run_count": 0}, @@ -572,7 +570,6 @@ async def _mode_draft_outline( self._write_state(context) self._step_name_to_method: dict[StepName, Callable] = { - StepName.DO_GC_ATTACHMENT_CHECK: self._step_gc_attachment_check, StepName.DO_DRAFT_OUTLINE: self._step_draft_outline, StepName.DO_GC_GET_OUTLINE_FEEDBACK: self._step_gc_get_outline_feedback, StepName.DO_FINISH: self._step_finish, @@ -614,7 +611,6 @@ async def _mode_draft_paper( if mode_status is Status.INITIATED: self._state.mode.set_step_order( [ - {"step_name": StepName.DO_GC_ATTACHMENT_CHECK, "run_count": 0}, {"step_name": StepName.DO_DRAFT_OUTLINE, "run_count": 0}, {"step_name": StepName.DO_GC_GET_OUTLINE_FEEDBACK, "run_count": 0}, {"step_name": StepName.DP_DRAFT_CONTENT, "run_count": 0}, @@ -630,7 +626,6 @@ async def _mode_draft_paper( self._write_state(context) self._step_name_to_method: dict[StepName, Callable] = { - StepName.DO_GC_ATTACHMENT_CHECK: self._step_gc_attachment_check, StepName.DO_DRAFT_OUTLINE: self._step_draft_outline, StepName.DO_GC_GET_OUTLINE_FEEDBACK: self._step_gc_get_outline_feedback, StepName.DP_DRAFT_CONTENT: self._step_draft_content, @@ -639,47 +634,6 @@ async def _mode_draft_paper( # Run return await self._run_mode(config, context, 
 
-    async def _step_gc_attachment_check(
-        self,
-        config: AssistantConfigModel,
-        context: ConversationContext,
-        message: ConversationMessage | None,
-        metadata: dict[str, Any] = {},
-    ) -> tuple[Status, StepName | None]:
-        next_step = None
-
-        # Pre-requisites
-        if self._state is None:
-            logger.error("Document Agent state is None. Returning.")
-            return Status.UNDEFINED, next_step
-
-        step = self._state.mode.get_step()
-        step_name = step.get_name()
-        step_status = step.get_status()
-
-        # Pre-requisites
-        step_called = StepName.DO_GC_ATTACHMENT_CHECK
-        if step_name is not step_called or (
-            step_status is not Status.NOT_COMPLETED and step_status is not Status.INITIATED
-        ):
-            logger.error(
-                "Document Agent state step: %s, step called: %s, state step completion status: %s. Resetting Mode.",
-                step_name,
-                step_called,
-                step_status,
-            )
-            self._state.mode.reset()
-            self._write_state(context)
-            return self._state.mode.get_status(), next_step
-
-        # Run
-        logger.info("Document Agent running step: %s", step_name)
-        status, next_step_name = await self._gc_attachment_check(config, context, message, metadata)
-        step.set_status(status)
-        self._state.mode.set_step(step)
-        self._write_state(context)
-        return step.get_status(), next_step_name
-
     async def _step_draft_outline(
         self,
         config: AssistantConfigModel,
@@ -823,85 +777,6 @@ async def _step_draft_content(
     #
     # region language model methods
     #
-
-    async def _gc_attachment_check(
-        self,
-        config: AssistantConfigModel,
-        context: ConversationContext,
-        message: ConversationMessage | None,
-        metadata: dict[str, Any] = {},
-    ) -> tuple[Status, StepName | None]:
-        method_metadata_key = "document_agent_gc_attachment_check"
-
-        gc_attachment_conversation_config: GuidedConversationConfigModel = GCAttachmentCheckConfigModel()
-
-        guided_conversation = GuidedConversation(
-            config=config,
-            openai_client=openai_client.create_client(config.service_config),
-            agent_config=gc_attachment_conversation_config,
-            conversation_context=context,
-        )
-
-        # update artifact
-        filenames = await self._attachments_extension.get_attachment_filenames(context)
-        filenames_str = ", ".join(filenames)
-
-        artifact_dict = guided_conversation.get_artifact_dict()
-        if artifact_dict is not None:
-            artifact_dict["filenames"] = filenames_str
-            guided_conversation.set_artifact_dict(artifact_dict)
-        else:
-            logger.error("artifact_dict unavailable.")
-
-        conversation_status = Status.UNDEFINED
-        next_step_name = None
-        # run guided conversation step
-        try:
-            if message is None:
-                user_message = None
-            else:
-                user_message = message.content
-            response_message, conversation_status, next_step_name = await guided_conversation.step_conversation(
-                last_user_message=user_message,
-            )
-
-            # add the completion to the metadata for debugging
-            deepmerge.always_merger.merge(
-                metadata,
-                {
-                    "debug": {
-                        f"{method_metadata_key}": {"response": response_message},
-                    }
-                },
-            )
-
-        except Exception as e:
-            logger.exception(f"exception occurred processing guided conversation: {e}")
-            response_message = "An error occurred while processing the guided conversation."
-            deepmerge.always_merger.merge(
-                metadata,
-                {
-                    "debug": {
-                        f"{method_metadata_key}": {
-                            "error": str(e),
-                        },
-                    }
-                },
-            )
-
-        await context.send_messages(
-            NewConversationMessage(
-                content=response_message,
-                message_type=MessageType.chat,
-                metadata=metadata,
-            )
-        )
-
-        # Need to add a good way to stop mode if an exception occurs.
-        # Also need to update the gc state turn count to 0 (and any thing else that needs to be reset) once conversation is over... or exception occurs?)
-
-        return conversation_status, next_step_name
-
     async def _draft_outline(
         self,
         config: AssistantConfigModel,