From 161298160b12cb4876caf49a94a3e4d52b5be002 Mon Sep 17 00:00:00 2001
From: JR Boos
Date: Wed, 6 Aug 2025 16:26:23 -0400
Subject: [PATCH] Enhance Lightspeed Chatbot functionality and documentation

- Added prerequisites and setup instructions to the README for running the backend API services.
- Updated the `QueryRequest` interface to include a `no_tools` property.
- Modified the `StreamEvent` interface to support a new `tool_call` event type.
- Introduced a `StreamToolCallData` interface for handling tool call data in streaming responses.
- Updated the `useChatbot` hook to manage tool call events and integrate them into the chatbot's state management.
- Adjusted the API service to handle tool call events in the streaming query response.
---
 README.md                                     |  7 +++-
 src/app/LightspeedChatbot/hooks/useChatbot.ts | 39 +++++++++++++------
 src/app/LightspeedChatbot/services/api.ts     |  6 +++
 src/app/LightspeedChatbot/types.ts            |  9 ++++-
 4 files changed, 47 insertions(+), 14 deletions(-)

diff --git a/README.md b/README.md
index 0c0355f..ffe54dc 100644
--- a/README.md
+++ b/README.md
@@ -15,6 +15,10 @@ A reference implementation of a chatbot interface built with React, TypeScript,
 
 ## 🚀 Quick Start
 
+**Prerequisites**: Ensure the [lightspeed-stack](https://github.com/lightspeed-core/lightspeed-stack) is running to provide the backend API services.
+
+If you need help getting `lightspeed-stack` running, follow this [guide](https://github.com/lightspeed-core/lightspeed-stack/blob/main/docs/getting_started.md).
+
 ```bash
 git clone https://github.com/your-org/lightspeed-reference-ui
 cd lightspeed-reference-ui
@@ -166,6 +170,7 @@ Body: {
 Response: Server-Sent Events stream with events:
 - start: { conversation_id: string }
 - token: { id: number, role: string, token: string }
+- tool_call: { id: number, role: string, token: string | Record<string, unknown> }
 - end: { referenced_documents: any[], truncated: any, input_tokens: number, output_tokens: number }
 ```
 
@@ -245,6 +250,6 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 ## 🆘 Support
 
 If you encounter any issues or have questions:
-- Check the [Issues](https://github.com/your-org/lightspeed-reference-ui/issues) page
+- Check the [Issues](https://github.com/lightspeed-core/lightspeed-reference-ui/issues) page
 - Review the component documentation in `src/app/LightspeedChatbot/README.md`
 - Refer to the [PatternFly documentation](https://www.patternfly.org/get-started/develop) for UI components
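
The four stream events documented in the README hunk above map naturally onto a discriminated union on the consumer side. A minimal sketch (the `LightspeedStreamEvent` name is illustrative; the `StreamEvent` interface shipped in `types.ts` below keeps `data` loosely typed):

```typescript
// Illustrative only: models the documented payloads so that narrowing on
// `event` gives typed access to each event's `data`.
type LightspeedStreamEvent =
  | { event: 'start'; data: { conversation_id: string } }
  | { event: 'token'; data: { id: number; role: string; token: string } }
  | { event: 'tool_call'; data: { id: number; role: string; token: string | Record<string, unknown> } }
  | {
      event: 'end';
      data: { referenced_documents: any[]; truncated: any; input_tokens: number; output_tokens: number };
    };

// Example: the `tool_call` payload is only reachable once `event` is checked.
const logEvent = (e: LightspeedStreamEvent): void => {
  if (e.event === 'tool_call') {
    console.log('tool call payload:', e.data.token);
  }
};
```
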
diff --git a/src/app/LightspeedChatbot/hooks/useChatbot.ts b/src/app/LightspeedChatbot/hooks/useChatbot.ts
index 11b4a8c..7892f14 100644
--- a/src/app/LightspeedChatbot/hooks/useChatbot.ts
+++ b/src/app/LightspeedChatbot/hooks/useChatbot.ts
@@ -4,7 +4,7 @@ import { MessageProps } from '@patternfly/chatbot/dist/dynamic/Message';
 import { Conversation } from '@patternfly/chatbot/dist/dynamic/ChatbotConversationHistoryNav';
 import { DropEvent, DropdownItem, DropdownList } from '@patternfly/react-core';
 
-import { Model, QueryRequest, StreamTokenData, StreamEndData, ConversationResponse } from '../types';
+import { Model, QueryRequest, StreamTokenData, StreamEndData, ConversationResponse, StreamToolCallData } from '../types';
 import { INITIAL_MESSAGES, INITIAL_CONVERSATIONS, USER_AVATAR, BOT_AVATAR, DEFAULT_SYSTEM_PROMPT } from '../constants';
 import { fetchModels, sendStreamingQuery, fetchConversation, deleteConversation } from '../services/api';
 import { generateId, findMatchingItems, copyToClipboard } from '../utils/helpers';
@@ -41,7 +41,7 @@ export const useChatbot = () => {
     // Set first LLM model as default
     const defaultModel = models.find((model) => model.api_model_type === 'llm');
     if (defaultModel) {
-      setSelectedModel(defaultModel.identifier);
+      setSelectedModel(defaultModel.provider_resource_id);
       setSelectedProvider(defaultModel.provider_id);
     }
   };
@@ -148,7 +148,12 @@ export const useChatbot = () => {
 
   // Selection handlers
   const onSelectModel = (_event?: React.MouseEvent, value?: string | number) => {
-    setSelectedModel(value as string);
+    const selectedIdentifier = value as string;
+    const selectedModelData = availableModels.find(model => model.identifier === selectedIdentifier);
+    if (selectedModelData) {
+      setSelectedModel(selectedModelData.provider_resource_id);
+      setSelectedProvider(selectedModelData.provider_id);
+    }
   };
 
   const onSelectDisplayMode = (_event?: React.MouseEvent, value?: string | number) => {
@@ -328,6 +333,7 @@ export const useChatbot = () => {
        conversation_id: currentConversationId || undefined,
        model: selectedModel || undefined,
        provider: selectedProvider || undefined,
+       no_tools: false,
        system_prompt: DEFAULT_SYSTEM_PROMPT,
        attachments:
          attachedFiles.length > 0 && fileContents.length > 0
@@ -347,15 +353,7 @@ export const useChatbot = () => {
        queryRequest,
        // onToken callback
        (token: string, tokenData?: StreamTokenData) => {
-         if (tokenData && tokenData.role === 'tool_execution') {
-           currentToolExecutions.push(token);
-           setToolExecutions((prev) => ({
-             ...prev,
-             [botMessageId]: [...currentToolExecutions],
-           }));
-         } else {
-           streamingContent += token;
-         }
+         streamingContent += token;
 
          setMessages((prevMessages) => {
            const updatedMessages = [...prevMessages];
@@ -407,6 +405,23 @@ export const useChatbot = () => {
          });
          setAnnouncement(`Message from Lightspeed AI: ${streamingContent}`);
        },
+       // onToolCall callback
+       (toolCallData: StreamToolCallData) => {
+         console.log('toolCallData', toolCallData);
+         if (
+           typeof toolCallData.token === 'object' &&
+           toolCallData.token !== null &&
+           typeof toolCallData.token.tool_name === 'string' &&
+           typeof toolCallData.token.arguments === 'object'
+         ) {
+           const { tool_name, arguments: toolArgs } = toolCallData.token;
+           currentToolExecutions.push(`${tool_name}(${JSON.stringify(toolArgs)})`);
+         }
+         setToolExecutions((prev) => ({
+           ...prev,
+           [botMessageId]: [...currentToolExecutions],
+         }));
+       },
      );
    } catch (error) {
      console.error('Error sending streaming query:', error);
diff --git a/src/app/LightspeedChatbot/services/api.ts b/src/app/LightspeedChatbot/services/api.ts
index a77f69e..6cb681e 100644
--- a/src/app/LightspeedChatbot/services/api.ts
+++ b/src/app/LightspeedChatbot/services/api.ts
@@ -6,6 +6,7 @@ import {
   StreamEvent,
   StreamStartData,
   StreamTokenData,
+  StreamToolCallData,
   StreamEndData,
   ConversationResponse,
 } from '../types';
@@ -79,6 +80,7 @@ export const sendStreamingQuery = async (
   onToken: (token: string, tokenData?: StreamTokenData) => void,
   onStart: (conversationId: string) => void,
   onEnd: (endData: StreamEndData) => void,
+  onToolCall: (toolCallData: StreamToolCallData) => void,
 ): Promise<void> => {
   try {
     const response = await fetch(`${API_BASE_URL}/v1/streaming_query`, {
@@ -117,6 +119,10 @@ export const sendStreamingQuery = async (
           const startData = eventData.data as StreamStartData;
           onStart(startData.conversation_id);
           break;
+        case 'tool_call':
+          const toolCallData = eventData.data as StreamToolCallData;
+          onToolCall(toolCallData);
+          break;
         case 'token':
           const tokenData = eventData.data as StreamTokenData;
           onToken(tokenData.token, tokenData);
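
For reference, a minimal caller-side sketch of the updated `sendStreamingQuery` signature with the new `onToolCall` callback. The handler bodies and the query text are illustrative, the import paths assume the sketch lives under `src/app/LightspeedChatbot/`, and it assumes `QueryRequest` carries a required `query` field as it does where `useChatbot` builds its request:

```typescript
import { sendStreamingQuery } from './services/api';
import { QueryRequest, StreamEndData, StreamToolCallData } from './types';

// Sketch only: callback order follows the parameter list above
// (request, onToken, onStart, onEnd, onToolCall).
export const runExampleQuery = async (): Promise<void> => {
  const request: QueryRequest = { query: 'List the failing pods', no_tools: false };
  let answer = '';

  await sendStreamingQuery(
    request,
    (token) => { answer += token; },                                            // onToken
    (conversationId) => console.log('conversation started:', conversationId),  // onStart
    (end: StreamEndData) => console.log('output tokens:', end.output_tokens),  // onEnd
    (toolCall: StreamToolCallData) => console.log('tool call:', toolCall.token), // onToolCall
  );

  console.log(answer);
};
```
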
diff --git a/src/app/LightspeedChatbot/types.ts b/src/app/LightspeedChatbot/types.ts
index c1537cf..b145f2d 100644
--- a/src/app/LightspeedChatbot/types.ts
+++ b/src/app/LightspeedChatbot/types.ts
@@ -14,6 +14,7 @@ export interface QueryRequest {
   conversation_id?: string;
   provider?: string;
   model?: string;
+  no_tools?: boolean;
   system_prompt?: string;
   attachments?: Array<{
     attachment_type: string;
@@ -42,7 +43,7 @@ export interface ConversationResponse {
 
 // Streaming types
 export interface StreamEvent {
-  event: 'start' | 'token' | 'end';
+  event: 'start' | 'token' | 'tool_call' | 'end';
   data: any;
 }
 
@@ -56,6 +57,12 @@ export interface StreamTokenData {
   token: string;
 }
 
+export interface StreamToolCallData {
+  id: number;
+  role: string;
+  token: string | Record<string, unknown>;
+}
+
 export interface StreamEndData {
   referenced_documents: Array<{
     doc_url: string;
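
`StreamToolCallData.token` can arrive either as a plain string or as a structured record. A small formatter in the spirit of the runtime checks in `useChatbot`'s `onToolCall` callback; this is a hypothetical helper, not part of this patch, and it assumes a module next to `types.ts`:

```typescript
import { StreamToolCallData } from './types';

// Hypothetical formatter: structured payloads render as `name(args)`,
// plain string tokens pass through unchanged.
export const describeToolCall = (data: StreamToolCallData): string => {
  const { token } = data;
  if (typeof token === 'string') {
    return token;
  }
  const record = token as { tool_name?: unknown; arguments?: unknown };
  const toolName = typeof record.tool_name === 'string' ? record.tool_name : 'unknown_tool';
  return `${toolName}(${JSON.stringify(record.arguments ?? {})})`;
};
```
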