diff --git a/src/apps/settings-modal/UxLabsSettings.tsx b/src/apps/settings-modal/UxLabsSettings.tsx
index 40874c6e12..42727b4d46 100644
--- a/src/apps/settings-modal/UxLabsSettings.tsx
+++ b/src/apps/settings-modal/UxLabsSettings.tsx
@@ -2,6 +2,7 @@ import * as React from 'react';
 
 import { FormControl, Typography } from '@mui/joy';
 import CallIcon from '@mui/icons-material/Call';
+import DeviceThermostatIcon from '@mui/icons-material/DeviceThermostat';
 import FormatPaintIcon from '@mui/icons-material/FormatPaint';
 import VerticalSplitIcon from '@mui/icons-material/VerticalSplit';
 import YouTubeIcon from '@mui/icons-material/YouTube';
@@ -16,8 +17,8 @@ export function UxLabsSettings() {
 
   // external state
   const {
-    labsCalling, /*labsEnhancedUI,*/ labsMagicDraw, labsPersonaYTCreator, labsSplitBranching,
-    setLabsCalling, /*setLabsEnhancedUI,*/ setLabsMagicDraw, setLabsPersonaYTCreator, setLabsSplitBranching,
+    labsCalling, /*labsEnhancedUI,*/ labsLlmOvertemp, labsMagicDraw, labsPersonaYTCreator, labsSplitBranching,
+    setLabsCalling, /*setLabsEnhancedUI,*/ setLabsLlmOvertemp, setLabsMagicDraw, setLabsPersonaYTCreator, setLabsSplitBranching,
   } = useUXLabsStore();
 
   return <>
@@ -42,6 +43,11 @@ export function UxLabsSettings() {
       checked={labsSplitBranching} onChange={setLabsSplitBranching}
     />
 
+    <FormSwitchControl
+      title={<><DeviceThermostatIcon sx={{ mr: 0.5 }} /> Overtemperature</>} description={labsLlmOvertemp ? 'LLMs ♨️' : 'Disabled'}
+      checked={labsLlmOvertemp} onChange={setLabsLlmOvertemp}
+    />
+
     {/* <FormSwitchControl
       title='Enhanced UI' description={labsEnhancedUI ? 'Enabled' : 'Disabled'}
      checked={labsEnhancedUI} onChange={setLabsEnhancedUI}
diff --git a/src/common/state/store-ux-labs.ts b/src/common/state/store-ux-labs.ts
--- a/src/common/state/store-ux-labs.ts
+++ b/src/common/state/store-ux-labs.ts
@@ -12,6 +12,9 @@ interface UXLabsStore {
 
   labsSplitBranching: boolean;
   setLabsSplitBranching: (labsSplitBranching: boolean) => void;
+  labsLlmOvertemp: boolean;
+  setLabsLlmOvertemp: (labsLlmOvertemp: boolean) => void;
+
 }
 
 export const useUXLabsStore = create<UXLabsStore>()(
@@ -48,6 +51,9 @@ export const useUXLabsStore = create<UXLabsStore>()(
 
       labsSplitBranching: false,
       setLabsSplitBranching: (labsSplitBranching: boolean) => set({ labsSplitBranching }),
+      labsLlmOvertemp: false,
+      setLabsLlmOvertemp: (labsLlmOvertemp: boolean) => set({ labsLlmOvertemp }),
+
     }),
     {
       name: 'app-ux-labs',
diff --git a/src/modules/llms/transports/server/openai/openai.router.ts b/src/modules/llms/transports/server/openai/openai.router.ts
index 6870ca43b8..d98ec43aa9 100644
--- a/src/modules/llms/transports/server/openai/openai.router.ts
+++ b/src/modules/llms/transports/server/openai/openai.router.ts
@@ -28,7 +28,7 @@ export type OpenAIAccessSchema = z.infer<typeof openAIAccessSchema>;
 
 export const openAIModelSchema = z.object({
   id: z.string(),
-  temperature: z.number().min(0).max(1).optional(),
+  temperature: z.number().min(0).max(2).optional(),
   maxTokens: z.number().min(1).max(1000000),
 });
 export type OpenAIModelSchema = z.infer<typeof openAIModelSchema>;
diff --git a/src/modules/llms/vendors/openai/OpenAILLMOptions.tsx b/src/modules/llms/vendors/openai/OpenAILLMOptions.tsx
index 60f7c0448e..3d813d6175 100644
--- a/src/modules/llms/vendors/openai/OpenAILLMOptions.tsx
+++ b/src/modules/llms/vendors/openai/OpenAILLMOptions.tsx
@@ -1,6 +1,7 @@
 import * as React from 'react';
 
 import { FormSliderControl } from '~/common/components/forms/FormSliderControl';
+import { useUXLabsStore } from '~/common/state/store-ux-labs';
 
 import { DLLM, useModelsStore } from '../../store-llms';
 import { LLMOptionsOpenAI } from './openai.vendor';
@@ -18,6 +19,10 @@ function normalizeOpenAIOptions(partialOptions?: Partial<LLMOptionsOpenAI>) {
 }
 
 export function OpenAILLMOptions(props: { llm: DLLM<unknown, LLMOptionsOpenAI> }) {
+  // external state
+  const labsLlmOvertemp = useUXLabsStore.getState().labsLlmOvertemp;
+
+  // derived state
   const { id: llmId, maxOutputTokens, options } = props.llm;
   const { llmResponseTokens, llmTemperature } = normalizeOpenAIOptions(options);
 
@@ -25,8 +30,8 @@ export function OpenAILLMOptions(props: { llm: DLLM<unknown, LLMOptionsOpenAI> }
 
     <FormSliderControl
       title='Temperature' ariaLabel='Model Temperature'
-      description={llmTemperature < 0.33 ? 'More strict' : llmTemperature > 0.67 ? 'Larger freedom' : 'Creativity'}
-      min={0} max={1} step={0.1} defaultValue={0.5}
+      description={llmTemperature < 0.33 ? 'More strict' : llmTemperature > 1 ? 'Extra hot ♨️' : llmTemperature > 0.67 ? 'Larger freedom' : 'Creativity'}
+      min={0} max={labsLlmOvertemp ? 2 : 1} step={0.1} defaultValue={0.5}
       valueLabelDisplay='on'
       value={llmTemperature}
       onChange={value => useModelsStore.getState().updateLLMOptions(llmId, { llmTemperature: value })}