Skip to content

Commit

Permalink
Fix & Feat Chats Summarizing
Browse files Browse the repository at this point in the history
- [+] fix(chat.ts): fix known issue where summarizing did not use the currently selected model
- [+] feat(chat.ts): add support for a user-selected summarizing model
  • Loading branch information
H0llyW00dzZ committed Dec 3, 2023
1 parent cf50299 commit a2f7985
Showing 1 changed file with 13 additions and 5 deletions.
18 changes: 13 additions & 5 deletions app/store/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,10 @@ function createEmptySession(): ChatSession {
};
}

function getSummarizeModel(currentModel: string) {
// if it is using gpt-* models, force to use 3.5 to summarize
return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
// fix known issue where summarize is not using the current model selected
function getSummarizeModel(currentModel: string, modelConfig: ModelConfig) {
  // Summarizing should use the model the user selected in the session's
  // model config, falling back to the session's current model when the
  // config does not name one. The previous ternary tested `currentModel`
  // instead of `modelConfig.model`, so an empty `currentModel` yielded an
  // empty model name rather than the user-selected one, and a truthy
  // `currentModel` could never act as a fallback.
  return modelConfig.model || currentModel;
}

function countMessages(msgs: ChatMessage[]) {
Expand Down Expand Up @@ -490,10 +491,14 @@ export const useChatStore = createPersistStore(
content: Locale.Store.Prompt.Topic,
}),
);
// this summarizing method should be depends of user selected
const sessionModelConfig = this.currentSession().mask.modelConfig;
const topicModel = getSummarizeModel(session.mask.modelConfig.model, sessionModelConfig);

api.llm.chat({
messages: topicMessages,
config: {
model: getSummarizeModel(session.mask.modelConfig.model),
model: topicModel,
},
onFinish(message) {
get().updateCurrentSession(
Expand Down Expand Up @@ -539,6 +544,9 @@ export const useChatStore = createPersistStore(
historyMsgLength > modelConfig.compressMessageLengthThreshold &&
modelConfig.sendMemory
) {
// this summarizing method should be depends of user selected
const sessionModelConfig = this.currentSession().mask.modelConfig;
const summarizeModel = getSummarizeModel(session.mask.modelConfig.model, sessionModelConfig);
api.llm.chat({
messages: toBeSummarizedMsgs.concat(
createMessage({
Expand All @@ -550,7 +558,7 @@ export const useChatStore = createPersistStore(
config: {
...modelConfig,
stream: true,
model: getSummarizeModel(session.mask.modelConfig.model),
model: summarizeModel,
},
onUpdate(message) {
session.memoryPrompt = message;
Expand Down

0 comments on commit a2f7985

Please sign in to comment.