Commit 44de253

refactor to improve input performance
1 parent 818a914 commit 44de253

File tree

5 files changed (+57, -11 lines)


packages/types/src/aiChat.ts

Lines changed: 10 additions & 3 deletions

@@ -1,6 +1,7 @@
 import { z } from 'zod';

-import { MessageRoleType } from './message';
+import { ChatMessage, MessageRoleType } from './message';
+import { ChatTopic } from './topic';

 export interface SendNewMessage {
   content: string;
@@ -20,7 +21,6 @@ export interface SendMessageServerParams {
     title?: string;
     topicMessageIds?: string[];
   };
-  onlyAddUserMessage?: boolean;
 }

 export const AiSendMessageServerSchema = z.object({
@@ -39,6 +39,13 @@ export const AiSendMessageServerSchema = z.object({
       topicMessageIds: z.array(z.string()).optional(),
     })
     .optional(),
-  onlyAddUserMessage: z.boolean().optional(),
   topicId: z.string().optional(),
 });
+
+export interface SendMessageServerResponse {
+  isCreatNewTopic: boolean;
+  messageId: string;
+  messages: ChatMessage[];
+  topicId: string;
+  topics?: ChatTopic[];
+}
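The new SendMessageServerResponse describes everything the client needs to reconcile state after one round-trip: the created message id, the full message list for the topic, and (only when a topic was just created) the refreshed topic list. A hedged consumer-side sketch; applyMessages and applyTopics are hypothetical callbacks, not part of the commit:

```ts
import type { SendMessageServerResponse } from '@lobechat/types';

// Minimal sketch of consuming the typed response (hypothetical callbacks).
const applyServerResponse = (
  data: SendMessageServerResponse,
  applyMessages: (topicId: string, messages: SendMessageServerResponse['messages']) => void,
  applyTopics: (topics: NonNullable<SendMessageServerResponse['topics']>) => void,
) => {
  // the message list always comes back for the (possibly new) topic
  applyMessages(data.topicId, data.messages);

  // topics are optional: the server only re-queries them when a topic was created
  if (data.isCreatNewTopic && data.topics) applyTopics(data.topics);
};
```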

packages/types/src/index.ts

Lines changed: 1 addition & 0 deletions

@@ -13,6 +13,7 @@ export * from './llm';
 export * from './message';
 export * from './meta';
 export * from './serverConfig';
+export * from './topic';
 export * from './user';
 export * from './user/settings';
 // FIXME: I think we need a refactor for the "openai" types
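Re-exporting './topic' from the package barrel lets consumers take ChatTopic straight from '@lobechat/types', which the new SendMessageServerResponse above relies on. A small hedged illustration; the `id` field is an assumption about the ChatTopic shape, not shown in this diff:

```ts
// With the barrel export in place, topic types resolve from the package root.
import type { ChatTopic } from '@lobechat/types';

// hypothetical helper: the exact ChatTopic fields are not part of this diff
const pickTopicIds = (topics: ChatTopic[]): string[] => topics.map((topic) => topic.id);
```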

src/server/routers/lambda/aiChat.ts

Lines changed: 13 additions & 3 deletions

@@ -1,10 +1,11 @@
+import { AiSendMessageServerSchema, SendMessageServerResponse } from '@lobechat/types';
+
 import { MessageModel } from '@/database/models/message';
 import { TopicModel } from '@/database/models/topic';
 import { authedProcedure, router } from '@/libs/trpc/lambda';
 import { serverDatabase } from '@/libs/trpc/lambda/middleware';
 import { AiChatService } from '@/server/services/aiChat';
 import { FileService } from '@/server/services/file';
-import { AiSendMessageServerSchema } from '@/types/aiChat';

 const aiChatProcedure = authedProcedure.use(serverDatabase).use(async (opts) => {
   const { ctx } = opts;
@@ -24,10 +25,11 @@ export const aiChatRouter = router({
     .input(AiSendMessageServerSchema)
     .mutation(async ({ input, ctx }) => {
       let messageId: string;
-      let topicId = input.topicId!;
+      let topicId = input.newMessage.topicId!;

       let isCreatNewTopic = false;

+      // create topic if there should be a new topic
       if (input.newTopic) {
         const topicItem = await ctx.topicModel.create({
           messages: input.newTopic.topicMessageIds,
@@ -44,11 +46,19 @@ export const aiChatRouter = router({
       });
       messageId = messageItem.id;

+      // retrieve latest messages and topic with
       const { messages, topics } = await ctx.aiChatService.getMessagesAndTopics({
+        isCreatNewTopic,
         sessionId: input.newMessage.sessionId,
         topicId,
       });

-      return { isCreatNewTopic, messageId: messageId, messages, topicId, topics };
+      return {
+        isCreatNewTopic,
+        messageId: messageId,
+        messages,
+        topicId,
+        topics,
+      } as SendMessageServerResponse;
     }),
 });
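For callers, the mutation now accepts the trimmed input (no onlyAddUserMessage) and returns a SendMessageServerResponse. A hedged sketch of invoking it from the web client; the lambdaClient import path and the aiChat.sendMessageInServer procedure path are assumptions inferred from this router, and the newMessage fields shown are only the ones visible in the diff:

```ts
import type { SendMessageServerResponse } from '@lobechat/types';

// assumed client entry point; adjust to the project's actual tRPC client export
import { lambdaClient } from '@/libs/trpc/client';

const sendInServer = async (
  sessionId: string,
  content: string,
): Promise<SendMessageServerResponse> =>
  // assumed procedure path; the router key is not visible in this diff
  lambdaClient.aiChat.sendMessageInServer.mutate({
    newMessage: { content, sessionId },
    // only pass newTopic when the message should open a fresh topic
    newTopic: { title: 'Default title', topicMessageIds: [] },
  });
```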

src/server/services/aiChat/index.ts

Lines changed: 2 additions & 1 deletion

@@ -19,6 +19,7 @@ export class AiChatService {

   async getMessagesAndTopics(params: {
     current?: number;
+    isCreatNewTopic?: boolean;
     pageSize?: number;
     sessionId?: string;
     topicId?: string;
@@ -27,7 +28,7 @@
       this.messageModel.query(params, {
         postProcessUrl: (path) => this.fileService.getFullFileUrl(path),
       }),
-      this.topicModel.query({ sessionId: params.sessionId }),
+      params.isCreatNewTopic ? this.topicModel.query({ sessionId: params.sessionId }) : undefined,
     ]);

     return { messages, topics };
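This is the heart of the performance fix: the topic list is only re-queried when a brand-new topic was created, so the common case of replying inside an existing topic saves a database round-trip. A self-contained sketch of the same pattern with stubbed query functions (the real MessageModel/TopicModel APIs are not reproduced here):

```ts
interface Message { id: string; content: string }
interface Topic { id: string; title: string }

// stand-ins for messageModel.query / topicModel.query
const queryMessages = async (_topicId?: string): Promise<Message[]> => [];
const queryTopics = async (_sessionId?: string): Promise<Topic[]> => [];

const getMessagesAndTopics = async (params: {
  isCreatNewTopic?: boolean;
  sessionId?: string;
  topicId?: string;
}) => {
  const [messages, topics] = await Promise.all([
    queryMessages(params.topicId),
    // skip the topics query unless a topic was just created;
    // a plain `undefined` entry is a valid Promise.all member
    params.isCreatNewTopic ? queryTopics(params.sessionId) : undefined,
  ]);

  return { messages, topics }; // topics is Topic[] | undefined
};
```

With isCreatNewTopic omitted, topics comes back undefined, which matches the now-optional topics field on SendMessageServerResponse.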

src/store/chat/slices/aiChat/actions/generateAIChatV2.ts

Lines changed: 31 additions & 4 deletions

@@ -1,5 +1,6 @@
 /* eslint-disable sort-keys-fix/sort-keys-fix, typescript-sort-keys/interface */
 // Disable the auto sort key eslint rule to make the code more logic and readable
+import { ChatMessage, ChatTopic, CreateMessageParams, SendMessageParams } from '@lobechat/types';
 import { t } from 'i18next';
 import { StateCreator } from 'zustand/vanilla';

@@ -8,7 +9,6 @@ import { getAgentStoreState } from '@/store/agent';
 import { ChatStore } from '@/store/chat/store';
 import { messageMapKey } from '@/store/chat/utils/messageMapKey';
 import { getSessionStoreState } from '@/store/session';
-import { CreateMessageParams, SendMessageParams } from '@/types/message';
 import { cleanObject } from '@/utils/object';

 import { chatSelectors, topicSelectors } from '../../../selectors';
@@ -18,6 +18,12 @@ export interface AIGenerateV2Action {
   * Sends a new message to the AI chat system
   */
  sendMessageInServer: (params: SendMessageParams) => Promise<void>;
+  internal_refreshAiChat: (params: {
+    topics?: ChatTopic[];
+    messages: ChatMessage[];
+    sessionId: string;
+    topicId?: string;
+  }) => void;
 }

 export const generateAIChatV2: StateCreator<
@@ -55,6 +61,10 @@ export const generateAIChatV2: StateCreator<
     };
     const messages = chatSelectors.activeBaseChats(get());

+    // use optimistic update to avoid the slow waiting
+    const tempId = get().internal_createTmpMessage(newMessage);
+
+    get().internal_toggleMessageLoading(true, tempId);
     set({ isCreatingMessage: true }, false, 'creatingMessage/start');

     const data = await aiChatService.sendMessageInServer({
@@ -66,15 +76,21 @@
             title: t('defaultTitle', { ns: 'topic' }),
           }
         : undefined,
-      onlyAddUserMessage,
+    });
+
+    // refresh the total data
+    get().internal_refreshAiChat({
+      messages: data.messages,
+      topics: data.topics,
+      sessionId: activeId,
+      topicId: data.topicId,
     });

     if (!activeTopicId) {
       await get().switchTopic(data.topicId!, true);
     }

-    get().internal_dispatchTopic({ type: 'updateTopics', value: data.topics as any[] });
-    get().internal_dispatchMessage({ type: 'updateMessages', value: data.messages });
+    get().internal_toggleMessageLoading(false, tempId);

     // update assistant update to make it rerank
     getSessionStoreState().triggerSessionUpdate(get().activeId);
@@ -117,4 +133,15 @@

     await Promise.all([summaryTitle(), addFilesToAgent()]);
   },
+
+  internal_refreshAiChat: ({ topics, messages, sessionId, topicId }) => {
+    set(
+      {
+        topicMaps: topics ? { ...get().topicMaps, [sessionId]: topics } : get().topicMaps,
+        messagesMap: { ...get().messagesMap, [messageMapKey(sessionId, topicId)]: messages },
+      },
+      false,
+      'refreshAiChat',
+    );
+  },
 });
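On the store side, the waiting is hidden by an optimistic temporary message, and the server result is applied in a single set() via the new internal_refreshAiChat action. A reduced, self-contained sketch of that flow using zustand/vanilla; the store shape and helper names here are illustrative stand-ins, not the real ChatStore slice:

```ts
import { createStore } from 'zustand/vanilla';

interface Message { id: string; content: string; loading?: boolean }

interface ChatState {
  messagesMap: Record<string, Message[]>;
  refreshAiChat: (key: string, messages: Message[]) => void;
  sendMessage: (key: string, content: string) => Promise<void>;
}

// stand-in for the server mutation; resolves with the authoritative message list
const fakeSendToServer = async (content: string): Promise<Message[]> => [
  { id: 'server-1', content },
  { id: 'server-2', content: 'assistant reply placeholder' },
];

export const chatStore = createStore<ChatState>((set, get) => ({
  messagesMap: {},

  // one write that swaps in the server's canonical list (mirrors internal_refreshAiChat)
  refreshAiChat: (key, messages) =>
    set({ messagesMap: { ...get().messagesMap, [key]: messages } }),

  sendMessage: async (key, content) => {
    // 1. optimistic: show a temporary message immediately
    const temp: Message = { id: `tmp-${Date.now()}`, content, loading: true };
    set({
      messagesMap: { ...get().messagesMap, [key]: [...(get().messagesMap[key] ?? []), temp] },
    });

    // 2. server round-trip, then replace the optimistic state in one update
    const messages = await fakeSendToServer(content);
    get().refreshAiChat(key, messages);
  },
}));
```

Calling chatStore.getState().sendMessage('session:topic', 'hi') renders the temporary message right away and swaps in the server list once the round-trip completes, which is the same shape of flow the commit introduces.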
