Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions workspaces/lightspeed/.changeset/smart-turkeys-watch.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
'@red-hat-developer-hub/backstage-plugin-lightspeed': patch
---

Fixed "new chat" CTA behavior
Added vertical scroll when too many models are available
Removed model grouping/categories in the model selector dropdown
Original file line number Diff line number Diff line change
Expand Up @@ -345,11 +345,14 @@ export const verifyNoResultsFoundMessage = async (
page: Page,
translations: LightspeedMessages,
) => {
await expect(page.getByLabel(translations['button.newChat']))
.toMatchAriaSnapshot(`
- heading "${translations['chatbox.emptyState.noResults.title']}"
- text: ${translations['chatbox.emptyState.noResults.body']}
`);
await expect(
page.getByRole('heading', {
name: translations['chatbox.emptyState.noResults.title'],
}),
).toBeVisible();
await expect(
page.getByText(translations['chatbox.emptyState.noResults.body']),
).toBeVisible();
};

export const verifyChatUnpinned = async (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -225,8 +225,11 @@ export const LightspeedChat = ({
const { allowed: hasUpdateAccess } = useLightspeedUpdatePermission();
const samplePrompts = useWelcomePrompts();
useEffect(() => {
if (user && lastOpenedId === null && isReady) {
if (!user || !isReady) return;
if (lastOpenedId === null) {
setConversationId(TEMP_CONVERSATION_ID);
}
if (lastOpenedId === TEMP_CONVERSATION_ID || lastOpenedId === null) {
setNewChatCreated(true);
}
}, [user, isReady, lastOpenedId, setConversationId]);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
* limitations under the License.
*/

import { Ref, useMemo, useState } from 'react';
import { Ref, useState } from 'react';

import { createStyles, makeStyles } from '@material-ui/core';
import ToggleOffOutlinedIcon from '@mui/icons-material/ToggleOffOutlined';
Expand Down Expand Up @@ -89,22 +89,12 @@ export const LightspeedChatBoxHeader = ({

const styles = useStyles();

// Group models by provider
const groupedModels = useMemo(() => {
const groups: {
[key: string]: { label: string; value: string; provider: string }[];
} = {};

models.forEach(model => {
const provider = model.provider || t('chatbox.provider.other');
if (!groups[provider]) {
groups[provider] = [];
}
groups[provider].push(model);
});

return groups;
}, [models, t]);
const maxLabelLength = Math.max(
...models.map(m => m.label.length),
selectedModel.length,
1,
);
const toggleMinWidth = `${maxLabelLength + 4}ch`;

const toggle = (toggleRef: Ref<MenuToggleElement>) => (
<MenuToggle
Expand All @@ -115,6 +105,7 @@ export const LightspeedChatBoxHeader = ({
isExpanded={isOptionsMenuOpen}
isDisabled={isModelSelectorDisabled}
onClick={() => setIsOptionsMenuOpen(!isOptionsMenuOpen)}
style={{ minWidth: toggleMinWidth }}
>
{selectedModel}
</MenuToggle>
Expand All @@ -136,6 +127,10 @@ export const LightspeedChatBoxHeader = ({
setDisplayMode(ChatbotDisplayMode.default);
};

const isOverlayMode = displayMode === ChatbotDisplayMode.default;
const scrollThreshold = isOverlayMode ? 8 : 10;
const isModelDropdownScrollable = models.length > scrollThreshold;

return (
<ChatbotHeaderActions>
<Dropdown
Expand All @@ -150,29 +145,21 @@ export const LightspeedChatBoxHeader = ({
shouldFocusToggleOnSelect
shouldFocusFirstItemOnOpen={false}
toggle={toggle}
isScrollable={isModelDropdownScrollable}
maxMenuHeight={isModelDropdownScrollable ? '240px' : undefined}
>
<DropdownList>
{Object.entries(groupedModels).map(
([provider, providerModels], index) => (
<>
<DropdownGroup
className={styles.groupTitle}
key={provider}
label={provider}
labelHeadingLevel="h1"
>
{providerModels.map(model => (
<DropdownItem value={model.value} key={model.value}>
{model.label}
</DropdownItem>
))}
</DropdownGroup>
{index < Object.entries(groupedModels).length - 1 && (
<Divider component="li" />
)}
</>
),
)}
{models.map(model => (
<DropdownGroup className={styles.groupTitle} key={model.label}>
<DropdownItem
value={model.value}
key={model.value}
isSelected={selectedModel === model.value}
>
{model.label}
</DropdownItem>
</DropdownGroup>
))}
Comment on lines +152 to +162

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Action required

1. Unkeyed fragment in models map 🐞 Bug ✓ Correctness

The model dropdown renders a list via models.map(...) but returns an unkeyed fragment
(<>...</>), so React cannot correctly reconcile items. Additionally, the only provided key is on
an inner DropdownGroup and uses model.label, which may be non-unique, increasing the chance of
incorrect selection/rendering when models change.
Agent Prompt
## Issue description
The model dropdown renders `models.map(...)` items using an unkeyed fragment (`<>...</>`). React keys must be applied to the *top-level* element returned by the map, otherwise reconciliation can misbehave and warnings are emitted. Also, `key={model.label}` may not be unique.

## Issue Context
This is in the model selector dropdown list rendering.

## Fix Focus Areas
- workspaces/lightspeed/plugins/lightspeed/src/components/LightspeedChatBoxHeader.tsx[143-156]

## Suggested change
- Replace the `<>...</>` wrapper with either:
  - a single `DropdownItem` directly (recommended), with `key={model.value}`; or
  - `<Fragment key={model.value}>...</Fragment>` (and remove/avoid conflicting nested keys).
- Avoid using `model.label` for keys; use `model.value`.

ⓘ Copy this prompt and use it to remediate the issue with your preferred AI generation tools

</DropdownList>
</Dropdown>
<ChatbotHeaderOptionsDropdown
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
* limitations under the License.
*/

import React from 'react';
import { RefObject, useCallback, useEffect, useRef, useState } from 'react';

import { useApi } from '@backstage/core-plugin-api';

Expand Down Expand Up @@ -75,7 +75,7 @@ export type UseConversationMessagesReturn = {
attachments?: Attachment[],
) => Promise<void>;
conversations: Conversations;
scrollToBottomRef: React.RefObject<ScrollContainerHandle | null>;
scrollToBottomRef: RefObject<ScrollContainerHandle | null>;
data?: BaseMessage[] | undefined;
error: Error | null;
isPending: boolean;
Expand Down Expand Up @@ -105,25 +105,25 @@ export const useConversationMessages = (
onStart?: (conversation_id: string) => void,
): UseConversationMessagesReturn => {
const { mutateAsync: createMessage } = useCreateConversationMessage();
const scrollToBottomRef = React.useRef<ScrollContainerHandle>(null);
const scrollToBottomRef = useRef<ScrollContainerHandle>(null);

const [currentConversation, setCurrentConversation] =
React.useState(conversationId);
const [conversations, setConversations] = React.useState<Conversations>({
useState(conversationId);
const [conversations, setConversations] = useState<Conversations>({
[currentConversation]: [],
});
const streamingConversations = React.useRef<Conversations>({
const streamingConversations = useRef<Conversations>({
[currentConversation]: [],
});

// Track pending tool calls during streaming
const pendingToolCalls = React.useRef<{ [id: number]: ToolCall }>({});
const pendingToolCalls = useRef<{ [id: number]: ToolCall }>({});

// Cache tool calls by conversation ID and message index to persist across refetches
// Key format: `${conversationId}-${messageIndex}`
const toolCallsCache = React.useRef<{ [key: string]: ToolCall[] }>({});
const toolCallsCache = useRef<{ [key: string]: ToolCall[] }>({});

React.useEffect(() => {
useEffect(() => {
if (currentConversation !== conversationId) {
setCurrentConversation(conversationId);
setConversations(prev => {
Expand All @@ -140,7 +140,7 @@ export const useConversationMessages = (
const { data: conversationsData = [], ...queryProps } =
useFetchConversationMessages(currentConversation);

React.useEffect(() => {
useEffect(() => {
if (
!Array.isArray(conversationsData) ||
(conversationsData.length === 0 &&
Expand Down Expand Up @@ -212,7 +212,7 @@ export const useConversationMessages = (
streamingConversations,
]);

const handleInputPrompt = React.useCallback(
const handleInputPrompt = useCallback(
async (prompt: string, attachments: Attachment[] = []) => {
let newConversationId = '';

Expand Down