Skip to content

Commit

Permalink
Doc agent re-flow changes + remove rest of GC agent. (#160)
Browse files Browse the repository at this point in the history
- Completes the remaining removal of the GC agent from prospector.
- New work-in-progress changes for the document agent.
- Moves GC agent config material under document/ for future reference if
needed.
  • Loading branch information
momuno authored Oct 23, 2024
1 parent f62b63e commit 9d23e42
Show file tree
Hide file tree
Showing 8 changed files with 441 additions and 402 deletions.
Original file line number Diff line number Diff line change
@@ -1,59 +1,58 @@
import json
from typing import Annotated, Any, Dict, List, Type, get_type_hints
from typing import TYPE_CHECKING, Annotated, Any, Dict, List, Type

from guided_conversation.utils.resources import ResourceConstraint, ResourceConstraintMode, ResourceConstraintUnit
from pydantic import BaseModel, Field, create_model
from pydantic_core import PydanticUndefinedType
from semantic_workbench_assistant.config import UISchema

from ... import helpers
from . import draft_grant_proposal_config_defaults as config_defaults
from . import config_defaults as config_defaults

if TYPE_CHECKING:
pass


#
# region Helpers
#

# take a full json schema and return a pydantic model, including support for
# nested objects and typed arrays

def determine_type(type_str: str) -> Type:
    """Resolve a short type-name string (e.g. "str", "list") to the Python type it names.

    Unrecognized names fall back to ``Any``.
    """
    known_types: Dict[str, Type] = {
        "str": str,
        "int": int,
        "float": float,
        "bool": bool,
        "list": List[Any],
        "dict": Dict[str, Any],
    }
    return known_types.get(type_str, Any)

def json_type_to_python_type(json_type: str) -> Type:
    """Translate a JSON Schema type name (e.g. "integer", "array") into the Python type it maps to.

    Unknown type names fall back to ``Any``.
    """
    schema_to_python: Dict[str, Type] = {
        "string": str,
        "integer": int,
        "number": float,
        "boolean": bool,
        "array": list,
        "object": dict,
    }
    return schema_to_python.get(json_type, Any)

def create_pydantic_model_from_json_schema(schema: Dict[str, Any], model_name="DynamicModel") -> Type[BaseModel]:
    """Build a pydantic model class from a JSON-schema dict.

    Supports nested objects (recursively creating sub-models) and typed
    arrays (including arrays of objects).

    Args:
        schema: A JSON-schema-style dict with a top-level "properties" map.
        model_name: Name given to the dynamically created model class.

    Returns:
        A dynamically created ``pydantic.BaseModel`` subclass.
    """

    # Nested function to parse properties from the schema into
    # (type, Field) tuples suitable for pydantic.create_model.
    def parse_properties(properties: Dict[str, Any]) -> Dict[str, Any]:
        fields = {}
        for prop_name, prop_attrs in properties.items():
            prop_type = prop_attrs.get("type")
            description = prop_attrs.get("description", None)

            if prop_type == "object":
                # Nested object -> recurse into a sub-model named after the property.
                nested_model = create_pydantic_model_from_json_schema(prop_attrs, model_name=prop_name.capitalize())
                fields[prop_name] = (nested_model, Field(..., description=description))
            elif prop_type == "array":
                items = prop_attrs.get("items", {})
                if items.get("type") == "object":
                    # Array of objects -> list of a recursively built sub-model.
                    nested_model = create_pydantic_model_from_json_schema(items)
                    fields[prop_name] = (List[nested_model], Field(..., description=description))
                else:
                    # Array of scalars -> list of the mapped Python type.
                    nested_type = json_type_to_python_type(items.get("type"))
                    fields[prop_name] = (List[nested_type], Field(..., description=description))
            else:
                # Scalar property.
                python_type = json_type_to_python_type(prop_type)
                fields[prop_name] = (python_type, Field(..., description=description))
        return fields

    properties = schema.get("properties", {})
    fields = parse_properties(properties)
    return create_model(model_name, **fields)


# endregion
Expand All @@ -77,13 +76,13 @@ class GuidedConversationAgentConfigModel(BaseModel):
title="Artifact",
description="The artifact that the agent will manage.",
),
UISchema(widget="textarea"),
] = json.dumps(pydantic_model_to_json(config_defaults.ArtifactModel), indent=2) # type: ignore
UISchema(widget="baseModelEditor"),
] = json.dumps(config_defaults.ArtifactModel.model_json_schema(), indent=2)

rules: Annotated[
list[str],
Field(title="Rules", description="Do's and don'ts that the agent should attempt to follow"),
UISchema(schema={"items": {"ui:widget": "textarea"}}),
UISchema(schema={"items": {"ui:widget": "textarea", "ui:options": {"rows": 2}}}),
] = config_defaults.rules

conversation_flow: Annotated[
Expand All @@ -92,7 +91,7 @@ class GuidedConversationAgentConfigModel(BaseModel):
title="Conversation Flow",
description="A loose natural language description of the steps of the conversation",
),
UISchema(widget="textarea", placeholder="[optional]"),
UISchema(widget="textarea", schema={"ui:options": {"rows": 10}}, placeholder="[optional]"),
] = config_defaults.conversation_flow.strip()

context: Annotated[
Expand Down Expand Up @@ -141,7 +140,8 @@ class ResourceConstraint(ResourceConstraint):
] = ResourceConstraint()

def get_artifact_model(self) -> Type[BaseModel]:
    """Build the pydantic artifact model from this config's schema string.

    ``self.artifact`` holds a JSON-schema document as JSON text; it is parsed
    and converted into a dynamically created pydantic model class.
    """
    schema = json.loads(self.artifact)
    return create_pydantic_model_from_json_schema(schema)


# endregion
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
import json
import logging
from pathlib import Path

from guided_conversation.guided_conversation_agent import GuidedConversation
from openai import AsyncOpenAI
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_workbench_assistant.assistant_app import (
ConversationContext,
storage_directory_for_context,
)

from ...config import AssistantConfigModel
from .config import GuidedConversationAgentConfigModel

logger = logging.getLogger(__name__)


#
# region Agent
#


class GuidedConversationAgent:
    """
    An agent for managing artifacts.
    """

    @staticmethod
    def get_state(
        conversation_context: ConversationContext,
    ) -> dict | None:
        """
        Get the state of the guided conversation agent.
        """
        return _read_guided_conversation_state(conversation_context)

    @staticmethod
    async def step_conversation(
        config: AssistantConfigModel,
        openai_client: AsyncOpenAI,
        agent_config: GuidedConversationAgentConfigModel,
        conversation_context: ConversationContext,
        last_user_message: str | None,
    ) -> tuple[str | None, bool]:
        """
        Step the conversation to the next turn.

        Returns a tuple of (agent message, whether the conversation is over).
        """
        service_id = "gc_main"

        # Set up a kernel backed by the configured OpenAI chat model.
        kernel = Kernel()
        kernel.add_service(
            OpenAIChatCompletion(
                service_id=service_id,
                async_client=openai_client,
                ai_model_id=config.request_config.openai_model,
            )
        )

        # Arguments shared by both ways of obtaining the agent.
        gc_args = {
            "kernel": kernel,
            "artifact": agent_config.get_artifact_model(),  # type: ignore
            "conversation_flow": agent_config.conversation_flow,
            "context": agent_config.context,
            "rules": agent_config.rules,
            "resource_constraint": agent_config.resource_constraint,
            "service_id": service_id,
        }

        # Resume from previously saved state when available; otherwise start fresh.
        saved_state = _read_guided_conversation_state(conversation_context)
        if saved_state:
            guided_conversation = GuidedConversation.from_json(json_data=saved_state, **gc_args)
        else:
            guided_conversation = GuidedConversation(**gc_args)

        # Step the conversation to start the conversation with the agent
        # or message
        result = await guided_conversation.step_conversation(last_user_message)

        # Save the state of the guided conversation agent
        _write_guided_conversation_state(conversation_context, guided_conversation.to_json())

        return result.ai_message, result.is_conversation_over

# endregion


#
# region Helpers
#


def _get_guided_conversation_storage_path(context: ConversationContext, filename: str | None = None) -> Path:
    """
    Get the path to the directory for storing guided conversation files.

    When *filename* is given, return the full path to that file inside the
    directory instead.
    """
    base = storage_directory_for_context(context) / "guided-conversation"
    return base / filename if filename else base


def _write_guided_conversation_state(context: ConversationContext, state: dict) -> None:
    """
    Write the state of the guided conversation agent to a file.

    The state dict is serialized to JSON and stored as ``state.json`` under
    the context's guided-conversation storage directory.
    """
    directory = _get_guided_conversation_storage_path(context)
    # exist_ok avoids the check-then-create race of `if not exists: mkdir`.
    directory.mkdir(parents=True, exist_ok=True)
    (directory / "state.json").write_text(json.dumps(state))


def _read_guided_conversation_state(context: ConversationContext) -> dict | None:
    """
    Read the state of the guided conversation agent from a file.

    Returns:
        The previously saved state dict, or None when no state file exists
        or its contents cannot be read/parsed.
    """
    path = _get_guided_conversation_storage_path(context, "state.json")
    if path.exists():
        try:
            return json.loads(path.read_text())
        except Exception:
            # Corrupt or unreadable state is treated as "no state" so the
            # conversation can restart, but log it instead of hiding it.
            logger.exception("failed to read guided conversation state from %s", path)
    return None


# endregion
Loading

0 comments on commit 9d23e42

Please sign in to comment.