diff --git a/packages/x-sdk/src/x-chat/__test__/index.test.tsx b/packages/x-sdk/src/x-chat/__test__/index.test.tsx
index c65228571..eed25dd1e 100644
--- a/packages/x-sdk/src/x-chat/__test__/index.test.tsx
+++ b/packages/x-sdk/src/x-chat/__test__/index.test.tsx
@@ -194,6 +194,7 @@ describe('useXChat', () => {
       expect(requestFallback).toHaveBeenCalledWith(
         { query: 'little' },
         {
+          messageInfo: undefined,
           error: new Error('failed'),
           messages: [{ query: 'little' }],
         },
diff --git a/packages/x-sdk/src/x-chat/__test__/providers.test.ts b/packages/x-sdk/src/x-chat/__test__/providers.test.ts
index aae4c5efd..142be8245 100644
--- a/packages/x-sdk/src/x-chat/__test__/providers.test.ts
+++ b/packages/x-sdk/src/x-chat/__test__/providers.test.ts
@@ -120,6 +120,8 @@ describe('DefaultChatProvider test', () => {
 describe('OpenAiChatProvider test', () => {
   const headers = new Headers();
   headers.set('content-type', 'text/event-stream');
+  const jsonHeaders = new Headers();
+  jsonHeaders.set('content-type', 'application/json');

   it('should initialize successfully', () => {
     const openAIProvider = new OpenAIChatProvider({
@@ -129,120 +131,665 @@ describe('OpenAiChatProvider test', () => {
     });
     expect(openAIProvider).not.toBeNull();
+    expect(openAIProvider).toBeInstanceOf(OpenAIChatProvider);
   });

-  it('should transformParams work successfully', () => {
-    const openAIProvider = new OpenAIChatProvider({
-      request: XRequest(baseURL, {
-        manual: true,
-      }),
-    });
-    openAIProvider.injectGetMessages(() => [
-      {
-        role: 'user',
-        content: 'test',
-      },
-    ]);
-    const openAITransformParams = openAIProvider.transformParams(
-      {
-        test2: 'test2',
-      },
-      {
-        params: {
-          test3: 'test3',
-        },
-      },
-    );
-    expect(openAITransformParams).toEqual({
-      test2: 'test2',
-      test3: 'test3',
-      messages: [
-        {
-          role: 'user',
-          content: 'test',
-        },
-      ],
-    });
-  });
+  describe('transformParams', () => {
+    it('should transformParams work successfully with basic parameters', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      openAIProvider.injectGetMessages(() => [
+        {
+          role: 'user',
+          content: 'test',
+        },
+      ]);
+      const openAITransformParams = openAIProvider.transformParams(
+        {
+          test2: 'test2',
+        },
+        {
+          params: {
+            test3: 'test3',
+          },
+        },
+      );
+      expect(openAITransformParams).toEqual({
+        test2: 'test2',
+        test3: 'test3',
+        messages: [
+          {
+            role: 'user',
+            content: 'test',
+          },
+        ],
+      });
+    });
+
+    it('should transformParams work with empty requestParams', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      openAIProvider.injectGetMessages(() => [
+        {
+          role: 'user',
+          content: 'hello',
+        },
+      ]);
+      const result = openAIProvider.transformParams({}, { params: { model: 'gpt-3.5-turbo' } });
+      expect(result).toEqual({
+        model: 'gpt-3.5-turbo',
+        messages: [
+          {
+            role: 'user',
+            content: 'hello',
+          },
+        ],
+      });
+    });
+
+    it('should transformParams work with empty options params', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      openAIProvider.injectGetMessages(() => [
+        {
+          role: 'assistant',
+          content: 'response',
+        },
+      ]);
+      const result = openAIProvider.transformParams({ temperature: 0.7 }, {});
+      expect(result).toEqual({
+        temperature: 0.7,
+        messages: [
+          {
+            role: 'assistant',
+            content: 'response',
+          },
+        ],
+      });
+    });
+
+    it('should transformParams override options params with requestParams', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      openAIProvider.injectGetMessages(() => []);
+      const result = openAIProvider.transformParams(
+        { temperature: 0.9, max_tokens: 100 },
+        { params: { temperature: 0.7, model: 'gpt-4' } },
+      );
+      expect(result).toEqual({
+        temperature: 0.9,
+        max_tokens: 100,
+        model: 'gpt-4',
+        messages: [],
+      });
+    });
+  });

-  it('should transformLocalMessage work successfully', () => {
-    const openAIProvider = new OpenAIChatProvider({
-      request: XRequest(baseURL, {
-        manual: true,
-      }),
-    });
-    const openAIMsg = openAIProvider.transformLocalMessage({
-      messages: [
-        {
-          role: 'user',
-          content: 'test',
-        },
-      ],
-    });
-    expect(openAIMsg).toEqual({
-      role: 'user',
-      content: 'test',
-    });
-  });
+  describe('transformLocalMessage', () => {
+    it('should transformLocalMessage work successfully with single message', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformLocalMessage({
+        messages: [
+          {
+            role: 'user',
+            content: 'test',
+          },
+        ],
+      });
+      expect(openAIMsg).toEqual([
+        {
+          role: 'user',
+          content: 'test',
+        },
+      ]);
+    });
+
+    it('should transformLocalMessage work with multiple messages', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformLocalMessage({
+        messages: [
+          { role: 'user', content: 'hello' },
+          { role: 'assistant', content: 'hi there' },
+          { role: 'user', content: 'how are you?' },
+        ],
+      });
+      expect(openAIMsg).toEqual([
+        { role: 'user', content: 'hello' },
+        { role: 'assistant', content: 'hi there' },
+        { role: 'user', content: 'how are you?' },
+      ]);
+    });
+
+    it('should transformLocalMessage return empty array when no messages', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformLocalMessage({});
+      expect(openAIMsg).toEqual([]);
+    });
+
+    it('should transformLocalMessage handle empty messages array', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformLocalMessage({ messages: [] });
+      expect(openAIMsg).toEqual([]);
+    });
+  });

-  it('should transformMessage not throw error', () => {
-    let chunk = {};
-    const openAIProvider = new OpenAIChatProvider({
-      request: XRequest(baseURL, {
-        manual: true,
-      }),
-    });
-    // error json format
-    chunk = {
-      data: 'test',
-    };
-    const openAIMsg = openAIProvider.transformMessage({
-      chunk,
-      chunks: [],
-      status: 'loading',
-      responseHeaders: headers,
-    });
-    expect(openAIMsg).toEqual({ role: 'assistant', content: '' });
-  });
-
-  it('should transformMessage work successfully', () => {
-    let chunk = {};
-    const openAIProvider = new OpenAIChatProvider({
-      request: XRequest(baseURL, {
-        manual: true,
-      }),
-    });
-    // test for streaming
-    chunk = {
-      data: '{"choices":[{"delta":{"role":"assistant","content":"test2"}}]}',
-    };
-    let openAIMsg = openAIProvider.transformMessage({
-      originMessage: {
-        role: 'assistant',
-        content: 'test',
-      },
-      chunk,
-      chunks: [],
-      status: 'loading',
-      responseHeaders: headers,
-    });
-    expect(openAIMsg).toEqual({ role: 'assistant', content: 'testtest2' });
-    // test for normal http
-    chunk = {
-      data: '{"choices":[{"message":{"role":"assistant","content":"test3"}}]}',
-    };
-    openAIMsg = openAIProvider.transformMessage({
-      originMessage: {
-        role: 'assistant',
-        content: 'test',
-      },
-      chunk,
-      chunks: [],
-      status: 'loading',
-      responseHeaders: headers,
-    });
-    expect(openAIMsg).toEqual({ role: 'assistant', content: 'testtest3' });
-  });
+  describe('transformMessage', () => {
+    it('should transformMessage not throw error with invalid JSON', () => {
+      const chunk = {
+        data: 'invalid json',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const openAIMsg = openAIProvider.transformMessage({
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: '' });
+    });
+
+    it('should transformMessage work successfully with streaming response', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"role":"assistant","content":"test2"}}]}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'test',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'testtest2' });
+    });
+
+    it('should transformMessage work with normal HTTP response', () => {
+      const chunk = {
+        choices: [{ message: { role: 'assistant', content: 'test3' } }],
+      } as any;
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'test',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'testtest3' });
+    });
+
+    it('should transformMessage handle [DONE] signal in streaming', () => {
+      const chunk = {
+        data: '[DONE]',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'completed response',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'completed response' });
+    });
+
+    it('should transformMessage handle multiple choices in streaming', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"role":"assistant","content":"part1"}},{"delta":{"role":"assistant","content":"part2"}}]}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'start',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'startpart1part2' });
+    });
+
+    it('should transformMessage handle empty delta content', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"role":"assistant","content":""}}]}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'existing',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'existing' });
+    });
+
+    it('should transformMessage handle missing role in delta', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"content":"new content"}}]}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: 'previous',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'previousnew content' });
+    });
+
+    it('should transformMessage handle chunks array fallback', () => {
+      const chunk = { choices: [{ message: { role: 'user', content: 'fallback' } }] };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: undefined,
+        chunk: undefined as any,
+        chunks: [chunk] as any,
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+      expect(openAIMsg).toEqual({ role: 'user', content: 'fallback' });
+    });
+
+    it('should transformMessage handle null originMessage', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"role":"system","content":"initial message"}}]}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: undefined,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'system', content: 'initial message' });
+    });
+
+    it('should transformMessage handle complex nested structure', () => {
+      const chunk = {
+        data: '{"choices":[{"delta":{"role":"assistant","content":"Hello, world!"}}],"usage":{"prompt_tokens":10,"completion_tokens":5}}',
+      };
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+      const openAIMsg = openAIProvider.transformMessage({
+        originMessage: {
+          role: 'assistant',
+          content: '',
+        } as any,
+        chunk,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: headers,
+      });
+      expect(openAIMsg).toEqual({ role: 'assistant', content: 'Hello, world!' });
+    });
+
+    it('should handle real-world OpenAI streaming response format', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      // Simulate streaming chunks
+      const chunks = [
+        {
+          data: '{"choices":[{"delta":{"role":"assistant"}}],"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890}',
+        },
+        {
+          data: '{"choices":[{"delta":{"content":"Hello"}}],"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890}',
+        },
+        {
+          data: '{"choices":[{"delta":{"content":", world!"}}],"id":"chatcmpl-123","object":"chat.completion.chunk","created":1234567890}',
+        },
+        { data: '[DONE]' },
+      ];
+
+      let result = { role: 'assistant', content: '' } as any;
+
+      chunks.forEach((chunk) => {
+        if (chunk.data !== '[DONE]') {
+          result = openAIProvider.transformMessage({
+            originMessage: result,
+            chunk,
+            chunks: [],
+            status: 'loading',
+            responseHeaders: headers,
+          });
+        }
+      });
+
+      expect(result).toEqual({ role: 'assistant', content: 'Hello, world!' });
+    });
+
+    it('should handle real-world OpenAI non-streaming response format', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const response = {
+        choices: [
+          {
+            message: {
+              role: 'assistant',
+              content: 'This is a complete response from OpenAI.',
+            },
+            finish_reason: 'stop',
+            index: 0,
+          },
+        ],
+        usage: {
+          prompt_tokens: 20,
+          completion_tokens: 10,
+          total_tokens: 30,
+        },
+        id: 'chatcmpl-456',
+        object: 'chat.completion',
+        created: 1234567890,
+        model: 'gpt-3.5-turbo',
+      } as any;
+
+      const result = openAIProvider.transformMessage({
+        originMessage: undefined,
+        chunk: response,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+
+      expect(result).toEqual({
+        role: 'assistant',
+        content: 'This is a complete response from OpenAI.',
+      });
+    });
+
+    it('should handle function call responses', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const response = {
+        choices: [
+          {
+            message: {
+              role: 'assistant',
+              content: null,
+              function_call: {
+                name: 'get_weather',
+                arguments: '{"location": "San Francisco, CA"}',
+              },
+            },
+          },
+        ],
+      } as any;
+
+      const result = openAIProvider.transformMessage({
+        originMessage: undefined,
+        chunk: response,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+
+      expect(result).toEqual({
+        role: 'assistant',
+        content: '',
+      });
+    });
+
+    it('should handle tool call responses', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const response = {
+        choices: [
+          {
+            message: {
+              role: 'assistant',
+              content: null,
+              tool_calls: [
+                {
+                  id: 'call_123',
+                  type: 'function',
+                  function: {
+                    name: 'get_weather',
+                    arguments: '{"location": "San Francisco, CA"}',
+                  },
+                },
+              ],
+            },
+          },
+        ],
+      } as any;
+
+      const result = openAIProvider.transformMessage({
+        originMessage: undefined,
+        chunk: response,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+
+      expect(result).toEqual({
+        role: 'assistant',
+        content: '',
+      });
+    });
+
+    it('should handle edge case with malformed choices', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const response = {
+        choices: [{ delta: { content: 'valid' } }, { invalid: 'structure' }, null],
+      } as any;
+
+      const result = openAIProvider.transformMessage({
+        originMessage: { role: 'assistant', content: 'start' } as any,
+        chunk: response,
+        chunks: [],
+        status: 'loading',
+        responseHeaders: jsonHeaders,
+      });
+
+      expect(result).toEqual({
+        role: 'assistant',
+        content: 'startvalid',
+      });
+    });
+
+    it('should handle empty response gracefully', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      const testCases = [
+        {},
+        { choices: [] },
+        { choices: [null] },
+        { choices: [{}] },
+        { choices: [{ delta: {} }] },
+        { choices: [{ message: {} }] },
+      ];
+
+      testCases.forEach((testCase) => {
+        const result = openAIProvider.transformMessage({
+          originMessage: { role: 'assistant', content: 'existing' } as any,
+          chunk: testCase as any,
+          chunks: [],
+          status: 'loading',
+          responseHeaders: jsonHeaders,
+        });
+        expect(result).toEqual({ role: 'assistant', content: 'existing' });
+      });
+    });
+  });
+
+  describe('Integration tests', () => {
+    it('should handle complete conversation flow', () => {
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      // Setup messages
+      const messages = [
+        { role: 'system', content: 'You are a helpful assistant.' },
+        { role: 'user', content: 'What is the weather like?' },
+      ] as any;
+      openAIProvider.injectGetMessages(() => messages);
+
+      // Test parameter transformation
+      const params = openAIProvider.transformParams({ temperature: 0.7, max_tokens: 150 } as any, {
+        params: { model: 'gpt-3.5-turbo' },
+      });
+      expect(params).toEqual({
+        temperature: 0.7,
+        max_tokens: 150,
+        model: 'gpt-3.5-turbo',
+        messages,
+      });
+
+      // Test local message extraction
+      const localMsg = openAIProvider.transformLocalMessage({ messages });
+      expect(localMsg).toEqual([
+        { role: 'system', content: 'You are a helpful assistant.' },
+        { role: 'user', content: 'What is the weather like?' },
+      ]);
+    });
+
+    it('should handle type safety with generic types', () => {
+      interface CustomMessage {
+        role: 'user' | 'assistant' | 'system';
+        content: string;
+        metadata?: Record<string, any>;
+      }
+
+      interface CustomParams {
+        model: string;
+        temperature?: number;
+        max_tokens?: number;
+        messages: CustomMessage[];
+      }
+
+      const openAIProvider = new OpenAIChatProvider({
+        request: XRequest(baseURL, {
+          manual: true,
+        }),
+      });
+
+      openAIProvider.injectGetMessages(() => [
+        { role: 'user', content: 'test', metadata: { test: true } },
+      ]);
+
+      const params = openAIProvider.transformParams({ temperature: 0.8 } as any, {
+        params: { model: 'gpt-4', messages: [] },
+      });
+
+      expect(params.model).toBe('gpt-4');
+      expect(params.temperature).toBe(0.8);
+      expect(params.messages).toHaveLength(1);
+    });
+  });
 });

@@ -308,10 +855,12 @@ describe('DeepSeekChatProvider test', () => {
         },
       ],
     });
-    expect(openAIMsg).toEqual({
-      role: 'user',
-      content: 'test',
-    });
+    expect(openAIMsg).toEqual([
+      {
+        role: 'user',
+        content: 'test',
+      },
+    ]);
   });

   it('should transformMessage not throw error', () => {
@@ -349,7 +898,7 @@ describe('DeepSeekChatProvider test', () => {
       originMessage: {
         role: 'assistant',
         content: 'test',
-      },
+      } as any,
       chunk,
       chunks: [],
       status: 'loading',
@@ -365,7 +914,7 @@ describe('DeepSeekChatProvider test', () => {
       originMessage: {
         role: 'assistant',
         content: 'test',
-      },
+      } as any,
       chunk,
       chunks: [],
       status: 'loading',
diff --git a/packages/x-sdk/src/x-chat/index.ts b/packages/x-sdk/src/x-chat/index.ts
index 3920e4133..76ba27e98 100644
--- a/packages/x-sdk/src/x-chat/index.ts
+++ b/packages/x-sdk/src/x-chat/index.ts
@@ -20,14 +20,14 @@ enum MessageStatusEnum {
 }

 export type MessageStatus = `${MessageStatusEnum}`;

-type RequestPlaceholderFn<Message> = (
-  message: Message,
+type RequestPlaceholderFn<Input, Message> = (
+  requestParams: Partial<Input>,
   info: { messages: Message[] },
 ) => Message;

-type RequestFallbackFn<Message> = (
-  message: Message,
-  info: { error: Error; messages: Message[] },
+type RequestFallbackFn<Input, Message> = (
+  requestParams: Partial<Input>,
+  info: { error: Error; messages: Message[]; messageInfo: MessageInfo<Message> },
 ) => Message | Promise<Message>;

 export type RequestParams<Message> = {
@@ -45,8 +45,8 @@ export interface XChatConfig<
   defaultMessages?: DefaultMessageInfo<ChatMessage>[];
   /** Convert agent message to bubble usage message type */
   parser?: (message: ChatMessage) => BubbleMessage | BubbleMessage[];
-  requestPlaceholder?: ChatMessage | RequestPlaceholderFn<ChatMessage>;
-  requestFallback?: ChatMessage | RequestFallbackFn<ChatMessage>;
+  requestPlaceholder?: ChatMessage | RequestPlaceholderFn<RequestParams<ChatMessage>, ChatMessage>;
+  requestFallback?: ChatMessage | RequestFallbackFn<RequestParams<ChatMessage>, ChatMessage>;
 }

 export interface MessageInfo<ChatMessage> {
@@ -76,6 +76,8 @@ function toArray<T>(item: T | T[]): T[] {
   return Array.isArray(item) ? item : [item];
 }

+const IsRequestingMap = new Map<string, boolean>();
+
 export default function useXChat<
   ChatMessage extends SimpleType = string,
   ParsedMessage extends SimpleType = ChatMessage,
@@ -169,7 +171,10 @@ export default function useXChat<
     }
     const { updatingId, reload } = opts || {};
     let loadingMsgId: number | string | null | undefined = null;
-    const message = provider.transformLocalMessage(requestParams);
+    const localMessage = provider.transformLocalMessage(requestParams);
+    const messages = (Array.isArray(localMessage) ? localMessage : [localMessage]).map((message) =>
+      createMessage(message, 'local', opts?.extra),
+    );
     if (reload) {
       loadingMsgId = updatingId;
       setMessages((ori: MessageInfo<ChatMessage>[]) => {
@@ -178,9 +183,12 @@ export default function useXChat<
         let placeholderMsg: ChatMessage;
         if (typeof requestPlaceholder === 'function') {
           // typescript has bug that not get real return type when use `typeof function` check
-          placeholderMsg = (requestPlaceholder as RequestPlaceholderFn<ChatMessage>)(message, {
-            messages: getFilteredMessages(nextMessages),
-          });
+          placeholderMsg = (requestPlaceholder as RequestPlaceholderFn<RequestParams<ChatMessage>, ChatMessage>)(
+            requestParams,
+            {
+              messages: getFilteredMessages(nextMessages),
+            },
+          );
         } else {
           placeholderMsg = requestPlaceholder;
         }
@@ -199,14 +207,17 @@ export default function useXChat<
     } else {
       // Add placeholder message
       setMessages((ori: MessageInfo<ChatMessage>[]) => {
-        let nextMessages = [...ori, createMessage(message, 'local', opts?.extra)];
+        let nextMessages = [...ori, ...messages];
         if (requestPlaceholder) {
           let placeholderMsg: ChatMessage;
           if (typeof requestPlaceholder === 'function') {
             // typescript has bug that not get real return type when use `typeof function` check
-            placeholderMsg = (requestPlaceholder as RequestPlaceholderFn<ChatMessage>)(message, {
-              messages: getFilteredMessages(nextMessages),
-            });
+            placeholderMsg = (requestPlaceholder as RequestPlaceholderFn<RequestParams<ChatMessage>, ChatMessage>)(
+              requestParams,
+              {
+                messages: getFilteredMessages(nextMessages),
+              },
+            );
           } else {
             placeholderMsg = requestPlaceholder;
           }
@@ -288,10 +299,12 @@ export default function useXChat<
       },
       onSuccess: (chunks: Output[], headers: Headers) => {
         setIsRequesting(false);
+        conversationKey && IsRequestingMap.delete(conversationKey);
         updateMessage('success', undefined as Output, chunks, headers);
       },
       onError: async (error: Error) => {
         setIsRequesting(false);
+        conversationKey && IsRequestingMap.delete(conversationKey);
         if (requestFallback) {
           let fallbackMsg: ChatMessage;
           // Update as error
@@ -301,13 +314,14 @@ export default function useXChat<
           const msg = getMessages().find(
             (info) => info.id === loadingMsgId || info.id === updatingMsgId,
           );
-          fallbackMsg = await (requestFallback as RequestFallbackFn<ChatMessage>)(
-            msg?.message || message,
-            {
-              error,
-              messages,
-            },
-          );
+
+          fallbackMsg = await (
+            requestFallback as RequestFallbackFn<RequestParams<ChatMessage>, ChatMessage>
+          )(requestParams, {
+            error,
+            messageInfo: msg as MessageInfo<ChatMessage>,
+            messages,
+          });
         } else {
           fallbackMsg = requestFallback;
         }
@@ -334,7 +348,9 @@ export default function useXChat<
         }
       },
     });
+    setIsRequesting(true);
+    conversationKey && IsRequestingMap.set(conversationKey, true);
     provider.request.run(provider.transformParams(requestParams, provider.request.options));
   };

@@ -375,7 +391,7 @@ export default function useXChat<
       }
       requestHandlerRef.current?.abort();
     },
-    isRequesting,
+    isRequesting: conversationKey ? IsRequestingMap?.get(conversationKey) || false : isRequesting,
     onReload,
   } as const;
 }
diff --git a/packages/x-sdk/src/x-chat/providers/AbstractChatProvider.ts b/packages/x-sdk/src/x-chat/providers/AbstractChatProvider.ts
index c1b3d0e8d..4964fb691 100644
--- a/packages/x-sdk/src/x-chat/providers/AbstractChatProvider.ts
+++ b/packages/x-sdk/src/x-chat/providers/AbstractChatProvider.ts
@@ -46,7 +46,7 @@ export default abstract class AbstractChatProvider<
    * 将onRequest传入的参数转换为本地(用户发送)的ChatMessage,用于消息渲染
    * @param requestParams onRequest传入的参数
    */
-  abstract transformLocalMessage(requestParams: Partial<Input>): ChatMessage;
+  abstract transformLocalMessage(requestParams: Partial<Input>): ChatMessage | ChatMessage[];

   /**
    * 可在更新返回数据时对messages做转换,同时会更新到messages
diff --git a/packages/x-sdk/src/x-chat/providers/DeepSeekChatProvider.ts b/packages/x-sdk/src/x-chat/providers/DeepSeekChatProvider.ts
index 89bdfbec5..789f03d2b 100644
--- a/packages/x-sdk/src/x-chat/providers/DeepSeekChatProvider.ts
+++ b/packages/x-sdk/src/x-chat/providers/DeepSeekChatProvider.ts
@@ -22,9 +22,8 @@ export default class DeepSeekChatProvider<
     } as unknown as Input;
   }

-  transformLocalMessage(requestParams: Partial<Input>): ChatMessage {
-    const lastMessage = requestParams?.messages?.[requestParams?.messages?.length - 1];
-    return lastMessage as unknown as ChatMessage;
+  transformLocalMessage(requestParams: Partial<Input>): ChatMessage[] {
+    return (requestParams?.messages || []) as ChatMessage[];
   }

   transformMessage(info: TransformMessage<Output, ChatMessage>): ChatMessage {
diff --git a/packages/x-sdk/src/x-chat/providers/OpenAIChatProvider.ts b/packages/x-sdk/src/x-chat/providers/OpenAIChatProvider.ts
index bddbddc0c..9802b9c24 100644
--- a/packages/x-sdk/src/x-chat/providers/OpenAIChatProvider.ts
+++ b/packages/x-sdk/src/x-chat/providers/OpenAIChatProvider.ts
@@ -22,9 +22,8 @@ export default class OpenAIChatProvider<
     } as unknown as Input;
   }

-  transformLocalMessage(requestParams: Partial<Input>): ChatMessage {
-    const lastMessage = requestParams?.messages?.[requestParams?.messages?.length - 1];
-    return lastMessage as unknown as ChatMessage;
+  transformLocalMessage(requestParams: Partial<Input>): ChatMessage[] {
+    return (requestParams?.messages || []) as ChatMessage[];
   }

   transformMessage(info: TransformMessage<Output, ChatMessage>): ChatMessage {
diff --git a/packages/x/docs/playground/copilot.tsx b/packages/x/docs/playground/copilot.tsx
index 16c9d85b0..734c59447 100644
--- a/packages/x/docs/playground/copilot.tsx
+++ b/packages/x/docs/playground/copilot.tsx
@@ -259,9 +259,8 @@ const Copilot = (props: CopilotProps) => {
         role: 'assistant',
       };
     },
-    requestFallback: (message, info) => {
-      console.log(message, info);
-      return message;
+    requestFallback: (_, { messageInfo }) => {
+      return messageInfo?.message;
     },
   });

diff --git a/packages/x/docs/playground/independent.tsx b/packages/x/docs/playground/independent.tsx
index f63c27785..e39e00fb8 100644
--- a/packages/x/docs/playground/independent.tsx
+++ b/packages/x/docs/playground/independent.tsx
@@ -533,10 +533,10 @@ const Independent: React.FC = () => {
         role: 'assistant',
       };
     },
-    requestFallback: (e) => {
+    requestFallback: (_, { messageInfo }) => {
       return {
-        ...e,
-        content: e.content || locale.requestFailedPleaseTryAgain,
+        ...messageInfo?.message,
+        content: messageInfo?.message?.content || locale.requestFailedPleaseTryAgain,
       };
     },
   });
diff --git a/packages/x/docs/playground/ultramodern.tsx b/packages/x/docs/playground/ultramodern.tsx
index 600f0a474..4b2336846 100644
--- a/packages/x/docs/playground/ultramodern.tsx
+++ b/packages/x/docs/playground/ultramodern.tsx
@@ -287,10 +287,10 @@ const App = () => {
         role: 'assistant',
       };
     },
-    requestFallback: (e) => {
+    requestFallback: (_, { messageInfo }) => {
       return {
-        ...e,
-        content: e.content || locale.requestFailedPleaseTryAgain,
+        ...messageInfo?.message,
+        content: messageInfo?.message?.content || locale.requestFailedPleaseTryAgain,
       };
     },
   });
diff --git a/packages/x/docs/x-sdk/demos/x-conversations/with-x-chat.tsx b/packages/x/docs/x-sdk/demos/x-conversations/with-x-chat.tsx
index ab8e0dd78..d5adcc0b5 100644
--- a/packages/x/docs/x-sdk/demos/x-conversations/with-x-chat.tsx
+++ b/packages/x/docs/x-sdk/demos/x-conversations/with-x-chat.tsx
@@ -81,13 +81,14 @@ export default () => {
         role: 'assistant',
       };
     },
-    requestFallback: (e) => {
+    requestFallback: (_, { error }) => {
       return {
-        ...e,
-        content: e.content,
+        role: 'assistant',
+        content: error.message,
       };
     },
   });
+
   useEffect(() => {
     senderRef.current?.clear();
   }, [activeConversationKey]);
diff --git a/packages/x/docs/x-sdk/use-x-chat.en-US.md b/packages/x/docs/x-sdk/use-x-chat.en-US.md
index 2a885352c..99203bcf8 100644
--- a/packages/x/docs/x-sdk/use-x-chat.en-US.md
+++ b/packages/x/docs/x-sdk/use-x-chat.en-US.md
@@ -44,8 +44,8 @@ type useXChat<
 | provider | Data provider used to convert data and requests of different structures into formats that useXChat can consume. The platform includes built-in `DefaultChatProvider` and `OpenAIChatProvider`, and you can also implement your own Provider by inheriting `AbstractChatProvider`. See: [Chat Provider Documentation](/x-sdks/chat-provider-en) | AbstractChatProvider\<ChatMessage\> | - | - |
 | defaultMessages | Default display messages | { message: ChatMessage, status: MessageStatus }[] | - | - |
 | parser | Converts ChatMessage into ParsedMessage for consumption. When not set, ChatMessage is consumed directly. Supports converting one ChatMessage into multiple ParsedMessages | (message: ChatMessage) => BubbleMessage \| BubbleMessage[] | - | - |
-| requestFallback | Fallback message for failed requests. When not provided, no message will be displayed | ChatMessage \| () => ChatMessage | - | - |
-| requestPlaceholder | Placeholder message during requests. When not provided, no message will be displayed | ChatMessage \| () => ChatMessage | - | - |
+| requestFallback | Fallback message for failed requests. When not provided, no message will be displayed | ChatMessage \| (requestParams: Partial\<Input\>, info: { error: Error; messages: ChatMessage[]; messageInfo: { id: string \| number; message: ChatMessage; status: MessageStatus } }) => ChatMessage \| Promise\<ChatMessage\> | - | - |
+| requestPlaceholder | Placeholder message during requests. When not provided, no message will be displayed | ChatMessage \| (requestParams: Partial\<Input\>, info: { messages: ChatMessage[] }) => ChatMessage | - | - |

 ### XChatConfigReturnType

diff --git a/packages/x/docs/x-sdk/use-x-chat.zh-CN.md b/packages/x/docs/x-sdk/use-x-chat.zh-CN.md
index bbd57c3c2..127bafc5f 100644
--- a/packages/x/docs/x-sdk/use-x-chat.zh-CN.md
+++ b/packages/x/docs/x-sdk/use-x-chat.zh-CN.md
@@ -44,8 +44,8 @@ type useXChat<
 | provider | 数据提供方,用于将不同结构的数据及请求转换为useXChat能消费的格式,平台内置了`DefaultChatProvider`和`OpenAIChatProvider`,你也可以通过继承`AbstractChatProvider`实现自己的Provider。详见:[Chat Provider文档](/x-sdks/chat-provider-cn) | AbstractChatProvider\<ChatMessage\> | - | - |
 | defaultMessages | 默认展示信息 | { message: ChatMessage, status: MessageStatus }[] | - | - |
 | parser | 将 ChatMessage 转换成消费使用的 ParsedMessage,不设置时则直接消费 ChatMessage。支持将一条 ChatMessage 转换成多条 ParsedMessage | (message: ChatMessage) => BubbleMessage \| BubbleMessage[] | - | - |
-| requestFallback | 请求失败的兜底信息,不提供则不会展示 | ChatMessage \| () => ChatMessage | - | - |
-| requestPlaceholder | 请求中的占位信息,不提供则不会展示 | ChatMessage \| () => ChatMessage | - | - |
+| requestFallback | 请求失败的兜底信息,不提供则不会展示 | ChatMessage \| (requestParams: Partial\<Input\>, info: { error: Error; messages: ChatMessage[]; messageInfo: { id: string \| number; message: ChatMessage; status: MessageStatus } }) => ChatMessage \| Promise\<ChatMessage\> | - | - |
+| requestPlaceholder | 请求中的占位信息,不提供则不会展示 | ChatMessage \| (requestParams: Partial\<Input\>, info: { messages: ChatMessage[] }) => ChatMessage | - | - |

 ### XChatConfigReturnType
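
Reviewer note: the sketch below shows how the reworked `requestPlaceholder`/`requestFallback` callbacks and the array-returning `transformLocalMessage` are consumed after this change. It is illustrative only; the import path, the `ChatMessage` shape, and the `conversationKey` config option are assumptions for the sketch, not part of this diff.

```tsx
// Usage sketch under assumed import path and message shape.
import { OpenAIChatProvider, XRequest, useXChat } from '@ant-design/x-sdk'; // hypothetical path

interface ChatMessage {
  role: 'user' | 'assistant' | 'system';
  content: string;
}

const provider = new OpenAIChatProvider({
  request: XRequest('https://example.com/v1/chat/completions', { manual: true }),
});

const { onRequest, isRequesting } = useXChat({
  provider,
  conversationKey: 'demo', // assumed option; per-conversation state is tracked in IsRequestingMap
  // First argument is now the original requestParams, not the transformed local message.
  requestPlaceholder: (requestParams, { messages }) => ({
    role: 'assistant',
    content: `Waiting for a reply (${messages.length} message(s) so far)...`,
  }),
  // The failed message now arrives as info.messageInfo instead of the first argument.
  requestFallback: (_requestParams, { error, messageInfo }) => ({
    role: 'assistant',
    content: messageInfo?.message?.content || error.message,
  }),
});

// transformLocalMessage now returns every outgoing message, so all of them render locally,
// not just the last one.
onRequest({ messages: [{ role: 'user', content: 'Hello' }] });
```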