From ee2eabc0cc1e3a4e2142039ea422414b61771462 Mon Sep 17 00:00:00 2001
From: pgosar
Date: Sun, 9 Apr 2023 22:11:42 -0500
Subject: [PATCH] add support for configuring GPT model

---
 README.md        |  7 +++++--
 chatgdb/cli.py   | 19 +++++++++++++++++--
 chatgdb/utils.py | 33 ++++++++++++++++++++++++++++-----
 pyproject.toml   |  2 +-
 4 files changed, 51 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index b635d3e..2249ab8 100644
--- a/README.md
+++ b/README.md
@@ -29,8 +29,11 @@ To do that, run the command
 
 ```chatgdb -k <your api key>```
 
-Without the API key, you won't be able to make requests to OpenAI. The API key is stored in
-text in the same directory as the installed script, which is currently in your python site packages
+You also need to set the model to use. There are two possible options, ```gpt-3.5-turbo``` and ```gpt-4```:
+
+```chatgdb -m <model>```
+
+This information is stored in plain text in the same directory as the installed script, which is currently in your Python site packages
 folder along with the main script. You can easily find this location by running the following in your
 terminal:
 ``` python -m site --user-site```
diff --git a/chatgdb/cli.py b/chatgdb/cli.py
index 592e6cf..b23f37c 100644
--- a/chatgdb/cli.py
+++ b/chatgdb/cli.py
@@ -4,14 +4,21 @@
 from urllib.request import Request, urlopen
 import json
 
+PATH = dirname(abspath(getfile(currentframe())))
+
 
 def set_key(key):
     """Set the api key for ChatGDB"""
-    path = dirname(abspath(getfile(currentframe()))) + "/.secret.txt"
-    with open(path, "w") as f:
+    with open(PATH + "/.secret.txt", "w") as f:
         f.write("OPENAI_KEY=\"" + key + "\"")
 
 
+def set_model(model):
+    """Set the model for ChatGDB"""
+    with open(PATH + "/.model.txt", "w") as f:
+        f.write("MODEL=\"" + model + "\"")
+
+
 def version():
     """Return version information"""
     with urlopen(Request("https://pypi.org/pypi/chatgdb/json"), timeout=10) as f:
@@ -27,6 +34,12 @@ def main():
         "--key",
         type=str,
         help="Provide an api key for ChatGDB")
+    parser.add_argument(
+        '-m',
+        "--model",
+        type=str,
+        choices=["gpt-3.5-turbo", "gpt-4"],
+        help="Provide a model for ChatGDB (gpt-3.5-turbo or gpt-4)")
     parser.add_argument(
         '-v',
         "--version",
@@ -37,6 +50,8 @@ def main():
     args = parser.parse_args()
     if args.key:
         set_key(args.key)
+    elif args.model:
+        set_model(args.model)
     else:
         parser.print_help()
 
diff --git a/chatgdb/utils.py b/chatgdb/utils.py
index a7ab9ae..4080b00 100644
--- a/chatgdb/utils.py
+++ b/chatgdb/utils.py
@@ -7,7 +7,7 @@
 
 
 def get_key():
-    """Gets api key from .env file
+    """Gets api key from secret file
 
     Returns: (str) api key
     """
@@ -24,12 +24,36 @@ def get_key():
                 secret = k.split('"')[1::2]
     except FileNotFoundError:
         print("Could not find api key. Please make sure you've run the CLI "
               "tool and set up your api key")
         quit("Exiting...")
 
     return secret[0]
 
 
+def get_model():
+    """Gets model from model file
+
+    Returns: (str) model
+    """
+    model = []
+    model_name = ""
+    # gets path of this script - OS independent
+    path = dirname(abspath(getfile(currentframe()))) + "/.model.txt"
+    try:
+        # get appropriate model
+        with open(path) as f:
+            model = [line.strip() for line in f]
+        for m in model:
+            if m.startswith("MODEL"):
+                model_name = m.split('"')[1::2]
+    except FileNotFoundError:
+        print("Could not find model. Please make sure you've run the CLI "
+              "tool and set up your model")
+        quit("Exiting...")
+
+    return model_name[0]
+
+
 def make_request(url, headers=None, data=None):
     """Makes API request
 
@@ -74,7 +98,6 @@ def chat_help():
     "Content-Type": "application/json"
 }
 URL = "https://api.openai.com/v1/chat/completions"
-MODEL = "gpt-3.5-turbo"
 
 
 def explain_helper(prev_command, command, prompt):
@@ -86,7 +109,7 @@ def explain_helper(prev_command, command, prompt):
         prompt (str): prompt to use for explanation
     """
     question = prompt + prev_command if command == "" else command
-    data = {"model": MODEL,
+    data = {"model": get_model(),
             "messages": [{"role": "user", "content": question}]}
     body, response = make_request(URL, HEADERS, data=bytes(json.dumps(data),
                                                            encoding="utf-8"))
@@ -98,12 +121,12 @@
 
 def chat_helper(command, prompt):
     """Generates GDB/LLDB command based on user input
-    
+
     Params:
         command (str): user input
         prompt (str): prompt to use for command generation
     """
-    data = {"model": MODEL,
+    data = {"model": get_model(),
             "messages": [{"role": "user",
                           "content": prompt + command}]}
 
diff --git a/pyproject.toml b/pyproject.toml
index 29ee3e7..380face 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ChatGDB"
-version = "1.1.0"
+version = "1.2.0"
 authors = ["Pranay Gosar "]
 description = "Harness the power of ChatGPT directly inside the GDB debugger!"
 readme = "README.md"
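
As a quick sanity check of the new configuration plumbing, the helpers added here should round-trip through the .model.txt file. The snippet below is a minimal sketch and not part of the patch; it assumes the package is installed and that an API key has already been stored with `chatgdb -k`, because importing chatgdb.utils calls get_key() while building its module-level HEADERS constant.

```python
# Minimal sketch: exercise the set_model()/get_model() pair added by this patch.
# Assumption: `chatgdb -k <your api key>` has already been run, since chatgdb.utils
# calls get_key() at import time to build HEADERS.
from chatgdb import cli, utils

cli.set_model("gpt-4")      # writes MODEL="gpt-4" to .model.txt inside the package
print(utils.get_model())    # reads .model.txt back; expected output: gpt-4
```

Both .secret.txt and .model.txt live next to the installed package files, which `python -m site --user-site` will locate, as the README change above notes.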