Remove -4, add shortcut mechanism, refs #17
simonw committed Jun 15, 2023
1 parent 0f88e92 commit 68c3848
Showing 2 changed files with 9 additions and 5 deletions.
docs/usage.md (4 changes: 3 additions & 1 deletion)

@@ -14,7 +14,9 @@ To disable streaming and only return the response once it has completed:
 
 To switch from ChatGPT 3.5 (the default) to GPT-4 if you have access:
 
-    llm 'Ten names for cheesecakes' -4
+    llm 'Ten names for cheesecakes' -m gpt4
 
+You can use `-m 4` as an even shorter shortcut.
+
 Pass `--model <model name>` to use a different model.
 
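With this change, the following invocations are equivalent ways to run a prompt against GPT-4, since the alias table added in llm/cli.py below maps both `4` and `gpt4` to `gpt-4` and passes full model names through unchanged:

    llm 'Ten names for cheesecakes' -m gpt4
    llm 'Ten names for cheesecakes' -m 4
    llm 'Ten names for cheesecakes' --model gpt-4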
llm/cli.py (10 changes: 6 additions & 4 deletions)

@@ -15,6 +15,8 @@
 
 DEFAULT_MODEL = "gpt-3.5-turbo"
 
+MODEL_ALIASES = {"4": "gpt-4", "gpt4": "gpt-4", "chatgpt": "gpt-3.5-turbo"}
+
 
 @click.group(
     cls=DefaultGroup,
@@ -29,7 +31,6 @@ def cli():
 @cli.command(name="prompt")
 @click.argument("prompt", required=False)
 @click.option("--system", help="System prompt to use")
-@click.option("-4", "--gpt4", is_flag=True, help="Use GPT-4")
 @click.option("-m", "--model", help="Model to use")
 @click.option("--no-stream", is_flag=True, help="Do not stream output")
 @click.option("-n", "--no-log", is_flag=True, help="Don't log to database")
@@ -48,14 +49,12 @@ def cli():
     type=int,
 )
 @click.option("--key", help="API key to use")
-def prompt(prompt, system, gpt4, model, no_stream, no_log, _continue, chat_id, key):
+def prompt(prompt, system, model, no_stream, no_log, _continue, chat_id, key):
     "Execute a prompt against an OpenAI model"
     if prompt is None:
         # Read from stdin instead
         prompt = sys.stdin.read()
     openai.api_key = get_key(key, "openai", "OPENAI_API_KEY")
-    if gpt4:
-        model = "gpt-4"
     messages = []
     if _continue:
         _continue = -1
@@ -77,6 +76,9 @@ def prompt(prompt, system, gpt4, model, no_stream, no_log, _continue, chat_id, key):
     messages.append({"role": "user", "content": prompt})
     if model is None:
         model = history_model or DEFAULT_MODEL
+    else:
+        # Resolve model aliases
+        model = MODEL_ALIASES.get(model, model)
     try:
         if no_stream:
             response = openai.ChatCompletion.create(
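For reference, a minimal standalone sketch of how the new alias lookup behaves. It reuses the MODEL_ALIASES dict and the dict.get() fallback from the diff above; the resolve_model() helper is a hypothetical name used only for illustration, and it simplifies the real command, which also prefers the model recorded for a continued chat before falling back to DEFAULT_MODEL:

    DEFAULT_MODEL = "gpt-3.5-turbo"
    MODEL_ALIASES = {"4": "gpt-4", "gpt4": "gpt-4", "chatgpt": "gpt-3.5-turbo"}


    def resolve_model(model):
        """Resolve a -m/--model value to a full model name (hypothetical helper)."""
        if model is None:
            # No -m/--model given: fall back to the default.
            # (The real command first checks the model used by a continued chat.)
            return DEFAULT_MODEL
        # Known aliases map to full model names; anything else passes through
        # unchanged, so full names like "gpt-4" keep working.
        return MODEL_ALIASES.get(model, model)


    assert resolve_model("4") == "gpt-4"
    assert resolve_model("gpt4") == "gpt-4"
    assert resolve_model("chatgpt") == "gpt-3.5-turbo"
    assert resolve_model("gpt-4") == "gpt-4"
    assert resolve_model(None) == "gpt-3.5-turbo"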
