Skip to content

Commit

Permalink
feat: parse files and commands by AI
Browse files Browse the repository at this point in the history
  • Loading branch information
mba committed Jun 19, 2023
1 parent d38150a commit cc42147
Showing 1 changed file with 84 additions and 1 deletion.
85 changes: 84 additions & 1 deletion gpt_engineer/chat_to_files.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,72 @@
import json
import re

from typing import List, Tuple

import openai


def openai_func_call(
    model: str, messages: List, functions: List, function_call: dict
) -> dict:
    """Call the OpenAI chat completion API with function calling enabled.

    Args:
        model: Name of the chat model (e.g. ``"gpt-3.5-turbo-0613"``).
        messages: Chat messages in the OpenAI message format.
        functions: JSON-schema function definitions the model may call.
        function_call: Forces a specific function, e.g. ``{"name": "write_files"}``.

    Returns:
        The function-call arguments chosen by the model, parsed from JSON.

    Raises:
        ValueError: If the model response does not contain a function call.
        json.JSONDecodeError: If the returned arguments are not valid JSON.
    """
    # TODO(junlin): reuse AI class
    # ref: https://platform.openai.com/docs/api-reference/chat/create
    response = openai.ChatCompletion.create(
        model=model, messages=messages, functions=functions, function_call=function_call
    )
    message = response["choices"][0]["message"]
    # Even with function_call forced, the API may return a plain text
    # message; fail with a clear error instead of a bare KeyError.
    func_call = message.get("function_call")
    if func_call is None:
        raise ValueError("Model response did not include a function call")
    return json.loads(func_call["arguments"])


def parse_files_by_ai(chat, model="gpt-3.5-turbo-0613"):  # -> List[Tuple[str, str]]:
    """Extract (filename, content) pairs from *chat* via OpenAI function calling.

    Forces the model to answer through a ``write_files`` function call so the
    result comes back as structured JSON rather than free-form text.
    """
    # Schema for a single file entry returned by the model.
    file_item_schema = {
        "type": "object",
        "properties": {
            "filename": {
                "type": "string",
                "description": "the relative path to the file",
            },
            "content": {
                "type": "string",
                "description": "the full content of the file",
            },
        },
        "required": ["filename", "content"],
    }
    # Top-level function definition exposed to the model.
    write_files_schema = {
        "name": "write_files",
        "description": "Write the file to the disk based on "
        "the given file filename and content.",
        "parameters": {
            "type": "object",
            "properties": {
                "files": {
                    "type": "array",
                    "description": "an array of file which "
                    "contains filename and content",
                    "items": file_item_schema,
                }
            },
        },
    }
    prompt = (
        "you are an AI file writer, "
        "please write the following file to disk: " + chat
    )
    call_args = openai_func_call(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        functions=[write_files_schema],
        function_call={"name": "write_files"},
    )
    return [(entry["filename"], entry["content"]) for entry in call_args["files"]]


def parse_chat(chat): # -> List[Tuple[str, str]]:
# Get all ``` blocks and preceding filenames
Expand All @@ -25,9 +92,25 @@ def parse_chat(chat): # -> List[Tuple[str, str]]:
return files


def parse_files(chat, try_ai: bool = True) -> List[Tuple[str, str]]:
    """Return (filename, content) pairs parsed from a chat transcript.

    When *try_ai* is true, the AI-based parser is attempted first; on any
    failure or an empty result, the legacy code-block parser is used.
    """
    if not try_ai:
        return parse_chat(chat)

    try:
        print("Try to parse files by ai")
        # TODO: Add a switch to change the model of parse_chat_by_ai,
        # or use the same model as the main flow.
        ai_result = parse_files_by_ai(chat)
    except Exception as e:
        # Best-effort: the AI path is optional, so fall through on any error.
        print(f"parse files by ai failed `{e}` , use legacy method instead")
    else:
        if ai_result:
            return ai_result

    return parse_chat(chat)


def to_files(chat, workspace):
    """Store the raw chat and every parsed file into *workspace*.

    The full transcript is always saved under ``all_output.txt``; each
    parsed file is then written under its own filename.
    """
    workspace["all_output.txt"] = chat

    for file_name, file_content in parse_files(chat):
        print("saving ", file_name)
        workspace[file_name] = file_content

0 comments on commit cc42147

Please sign in to comment.