From db4e4acca0f79d9ebf8230193bb5fd5ec47706c7 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 01:47:05 -0500 Subject: [PATCH 01/87] perf(tui): stabilize long-session scrolling --- ui-tui/src/__tests__/textInputWrap.test.ts | 15 +++++- ui-tui/src/__tests__/viewportStore.test.ts | 31 +++++++++++ ui-tui/src/app/useMainApp.ts | 7 ++- ui-tui/src/components/appChrome.tsx | 55 ++----------------- ui-tui/src/components/appLayout.tsx | 9 ++-- ui-tui/src/components/textInput.tsx | 45 +--------------- ui-tui/src/hooks/useVirtualHistory.ts | 16 ++++-- ui-tui/src/lib/inputMetrics.ts | 62 ++++++++++++++++++++++ ui-tui/src/lib/viewportStore.ts | 59 ++++++++++++++++++++ ui-tui/src/types/hermes-ink.d.ts | 1 + 10 files changed, 195 insertions(+), 105 deletions(-) create mode 100644 ui-tui/src/__tests__/viewportStore.test.ts create mode 100644 ui-tui/src/lib/inputMetrics.ts create mode 100644 ui-tui/src/lib/viewportStore.ts diff --git a/ui-tui/src/__tests__/textInputWrap.test.ts b/ui-tui/src/__tests__/textInputWrap.test.ts index 9414b9fb..170f6883 100644 --- a/ui-tui/src/__tests__/textInputWrap.test.ts +++ b/ui-tui/src/__tests__/textInputWrap.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from 'vitest' -import { cursorLayout, offsetFromPosition } from '../components/textInput.js' +import { offsetFromPosition } from '../components/textInput.js' +import { cursorLayout, inputVisualHeight, stableComposerColumns } from '../lib/inputMetrics.js' describe('cursorLayout — char-wrap parity with wrap-ansi', () => { it('places cursor mid-line at its column', () => { @@ -35,6 +36,18 @@ describe('cursorLayout — char-wrap parity with wrap-ansi', () => { }) }) +describe('input metrics helpers', () => { + it('computes visual height from the wrapped cursor line', () => { + expect(inputVisualHeight('abcdefgh', 8)).toBe(2) + expect(inputVisualHeight('one\ntwo', 40)).toBe(2) + }) + + it('reserves a stable transcript scrollbar gutter for composer width', () => { + 
expect(stableComposerColumns(100, 3)).toBe(93) + expect(stableComposerColumns(10, 3)).toBe(20) + }) +}) + describe('offsetFromPosition — char-wrap inverse of cursorLayout', () => { it('returns 0 for empty input', () => { expect(offsetFromPosition('', 0, 0, 10)).toBe(0) diff --git a/ui-tui/src/__tests__/viewportStore.test.ts b/ui-tui/src/__tests__/viewportStore.test.ts new file mode 100644 index 00000000..671ef9cf --- /dev/null +++ b/ui-tui/src/__tests__/viewportStore.test.ts @@ -0,0 +1,31 @@ +import { describe, expect, it } from 'vitest' + +import { getViewportSnapshot, viewportSnapshotKey } from '../lib/viewportStore.js' + +describe('viewportStore', () => { + it('normalizes absent scroll handles', () => { + expect(getViewportSnapshot(null)).toEqual({ + atBottom: true, + bottom: 0, + pending: 0, + scrollHeight: 0, + top: 0, + viewportHeight: 0 + }) + }) + + it('includes pending scroll delta in snapshot math and keying', () => { + const handle = { + getPendingDelta: () => 3, + getScrollHeight: () => 40, + getScrollTop: () => 10, + getViewportHeight: () => 5, + isSticky: () => false + } + + const snap = getViewportSnapshot(handle as any) + + expect(snap).toMatchObject({ atBottom: false, bottom: 18, pending: 3, scrollHeight: 40, top: 13, viewportHeight: 5 }) + expect(viewportSnapshotKey(snap)).toBe('0:13:5:40:3') + }) +}) diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 0230e0b1..31f228eb 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -19,6 +19,7 @@ import { useVirtualHistory } from '../hooks/useVirtualHistory.js' import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js' import { terminalParityHints } from '../lib/terminalParity.js' import { buildToolTrailLine, sameToolTrailGroup, toolTrailLabel } from '../lib/text.js' +import { getViewportSnapshot } from '../lib/viewportStore.js' import type { Msg, PanelSection, SlashCatalog } from '../types.js' import { createGatewayEventHandler } from 
'./createGatewayEventHandler.js' @@ -689,11 +690,9 @@ export function useMainApp(gw: GatewayClient) { return true } - const top = Math.max(0, s.getScrollTop() + s.getPendingDelta()) - const vp = Math.max(0, s.getViewportHeight()) - const total = Math.max(vp, s.getScrollHeight()) + const { bottom, scrollHeight } = getViewportSnapshot(s) - return top + vp >= total - 3 + return bottom >= scrollHeight - 3 })() const liveProgress = useMemo( diff --git a/ui-tui/src/components/appChrome.tsx b/ui-tui/src/components/appChrome.tsx index 001c89b9..6085df8a 100644 --- a/ui-tui/src/components/appChrome.tsx +++ b/ui-tui/src/components/appChrome.tsx @@ -1,6 +1,6 @@ import { Box, type ScrollBoxHandle, Text } from '@hermes/ink' import { useStore } from '@nanostores/react' -import { type ReactNode, type RefObject, useCallback, useEffect, useMemo, useState, useSyncExternalStore } from 'react' +import { type ReactNode, type RefObject, useEffect, useMemo, useState } from 'react' import { $delegationState } from '../app/delegationStore.js' import { $turnState } from '../app/turnStore.js' @@ -9,6 +9,7 @@ import { VERBS } from '../content/verbs.js' import { fmtDuration } from '../domain/messages.js' import { stickyPromptFromViewport } from '../domain/viewport.js' import { buildSubagentTree, treeTotals, widthByDepth } from '../lib/subagentTree.js' +import { useViewportSnapshot } from '../lib/viewportStore.js' import { fmtK } from '../lib/text.js' import type { Theme } from '../theme.js' import type { Msg, Usage } from '../types.js' @@ -255,17 +256,7 @@ export function FloatBox({ children, color }: { children: ReactNode; color: stri } export function StickyPromptTracker({ messages, offsets, scrollRef, onChange }: StickyPromptTrackerProps) { - useSyncExternalStore( - useCallback((cb: () => void) => scrollRef.current?.subscribe(cb) ?? (() => {}), [scrollRef]), - () => { - const { atBottom, top } = getStickyViewport(scrollRef.current) - - return atBottom ? 
-1 - top : top - }, - () => NaN - ) - - const { atBottom, bottom, top } = getStickyViewport(scrollRef.current) + const { atBottom, bottom, top } = useViewportSnapshot(scrollRef) const text = stickyPromptFromViewport(messages, offsets, top, bottom, atBottom) useEffect(() => onChange(text), [onChange, text]) @@ -274,42 +265,18 @@ export function StickyPromptTracker({ messages, offsets, scrollRef, onChange }: } export function TranscriptScrollbar({ scrollRef, t }: TranscriptScrollbarProps) { - useSyncExternalStore( - useCallback((cb: () => void) => scrollRef.current?.subscribe(cb) ?? (() => {}), [scrollRef]), - () => { - const s = scrollRef.current - - if (!s) { - return NaN - } - - const vp = Math.max(0, s.getViewportHeight()) - const total = Math.max(vp, s.getScrollHeight()) - const top = Math.max(0, s.getScrollTop() + s.getPendingDelta()) - const thumb = total > vp ? Math.max(1, Math.round((vp * vp) / total)) : vp - const travel = Math.max(1, vp - thumb) - const thumbTop = total > vp ? Math.round((top / Math.max(1, total - vp)) * travel) : 0 - - return `${thumbTop}:${thumb}:${vp}` - }, - () => '' - ) - const [hover, setHover] = useState(false) const [grab, setGrab] = useState(null) - - const s = scrollRef.current - const vp = Math.max(0, s?.getViewportHeight() ?? 0) + const { scrollHeight: total, top: pos, viewportHeight: vp } = useViewportSnapshot(scrollRef) if (!vp) { return } - const total = Math.max(vp, s?.getScrollHeight() ?? vp) + const s = scrollRef.current const scrollable = total > vp const thumb = scrollable ? Math.max(1, Math.round((vp * vp) / total)) : vp const travel = Math.max(1, vp - thumb) - const pos = Math.max(0, (s?.getScrollTop() ?? 0) + (s?.getPendingDelta() ?? 0)) const thumbTop = scrollable ? Math.round((pos / Math.max(1, total - vp)) * travel) : 0 const thumbColor = grab !== null ? t.color.gold : hover ? t.color.amber : t.color.bronze const trackColor = hover ? 
t.color.bronze : t.color.dim @@ -391,15 +358,3 @@ interface TranscriptScrollbarProps { scrollRef: RefObject t: Theme } - -function getStickyViewport(s?: ScrollBoxHandle | null) { - const top = Math.max(0, (s?.getScrollTop() ?? 0) + (s?.getPendingDelta() ?? 0)) - const vp = Math.max(0, s?.getViewportHeight() ?? 0) - const total = Math.max(vp, s?.getScrollHeight() ?? vp) - - return { - atBottom: (s?.isSticky() ?? true) || top + vp >= total - 2, - bottom: top + vp, - top - } -} diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index d8564517..170d0649 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -7,6 +7,7 @@ import type { AppLayoutProgressProps, AppLayoutProps } from '../app/interfaces.j import { $isBlocked, $overlayState, patchOverlayState } from '../app/overlayStore.js' import { $uiState } from '../app/uiStore.js' import { PLACEHOLDER } from '../content/placeholders.js' +import { inputVisualHeight, stableComposerColumns } from '../lib/inputMetrics.js' import type { Theme } from '../theme.js' import type { DetailsMode, SectionVisibility } from '../types.js' @@ -171,6 +172,8 @@ const ComposerPane = memo(function ComposerPane({ const isBlocked = useStore($isBlocked) const sh = (composer.inputBuf[0] ?? composer.input).startsWith('!') const pw = sh ? 2 : 3 + const inputColumns = stableComposerColumns(composer.cols, pw) + const inputHeight = inputVisualHeight(composer.input, inputColumns) return ( @@ -232,10 +235,10 @@ const ComposerPane = memo(function ComposerPane({ )} - - {/* subtract NoSelect paddingX={1} (2 cols) + pw so wrap-ansi and cursorLayout agree */} + + {/* Reserve the transcript scrollbar gutter too so typing never rewraps when the scrollbar column repaints. 
*/} actually renders -export function cursorLayout(value: string, cursor: number, cols: number) { - const pos = Math.max(0, Math.min(cursor, value.length)) - const w = Math.max(1, cols) - - let col = 0, - line = 0 - - for (const { segment, index } of seg().segment(value)) { - if (index >= pos) { - break - } - - if (segment === '\n') { - line++ - col = 0 - - continue - } - - const sw = stringWidth(segment) - - if (!sw) { - continue - } - - if (col + sw > w) { - line++ - col = 0 - } - - col += sw - } - - // trailing cursor-cell overflows to the next row at the wrap column - if (col >= w) { - line++ - col = 0 - } - - return { column: col, line } -} - export function offsetFromPosition(value: string, row: number, col: number, cols: number) { if (!value.length) { return 0 diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts index 17bc8dfd..388b5e5a 100644 --- a/ui-tui/src/hooks/useVirtualHistory.ts +++ b/ui-tui/src/hooks/useVirtualHistory.ts @@ -167,8 +167,20 @@ export function useVirtualHistory( }, []) useLayoutEffect(() => { + const s = scrollRef.current let dirty = false + // Give the renderer the mounted-row coverage for passive scroll clamping. + // Without this, burst wheel/page scroll can race past the React commit that + // updates the virtual range and paint spacer-only frames. + if (s && n > 0 && vp > 0) { + const min = offsets[start] ?? 0 + const max = Math.max(min, (offsets[end] ?? 
total) - vp) + s.setClampBounds(min, max) + } else { + s?.setClampBounds(undefined, undefined) + } + if (skipMeasurement.current) { skipMeasurement.current = false } else { @@ -188,8 +200,6 @@ export function useVirtualHistory( } } - const s = scrollRef.current - if (s) { const next = { sticky: s.isSticky(), @@ -210,7 +220,7 @@ export function useVirtualHistory( if (dirty) { setVer(v => v + 1) } - }, [end, hasScrollRef, items, scrollRef, start]) + }, [end, hasScrollRef, items, n, offsets, scrollRef, start, total, vp]) return { bottomSpacer: Math.max(0, total - (offsets[end] ?? total)), diff --git a/ui-tui/src/lib/inputMetrics.ts b/ui-tui/src/lib/inputMetrics.ts new file mode 100644 index 00000000..a42dbb2f --- /dev/null +++ b/ui-tui/src/lib/inputMetrics.ts @@ -0,0 +1,62 @@ +import { stringWidth } from '@hermes/ink' + +let _seg: Intl.Segmenter | null = null +const seg = () => (_seg ??= new Intl.Segmenter(undefined, { granularity: 'grapheme' })) + +/** + * Mirrors the char-wrap behavior used by the composer TextInput. + * Returns the zero-based visual line and column of the cursor cell. + */ +export function cursorLayout(value: string, cursor: number, cols: number) { + const pos = Math.max(0, Math.min(cursor, value.length)) + const w = Math.max(1, cols) + + let col = 0, + line = 0 + + for (const { segment, index } of seg().segment(value)) { + if (index >= pos) { + break + } + + if (segment === '\n') { + line++ + col = 0 + + continue + } + + const sw = stringWidth(segment) + + if (!sw) { + continue + } + + if (col + sw > w) { + line++ + col = 0 + } + + col += sw + } + + // trailing cursor-cell overflows to the next row at the wrap column + if (col >= w) { + line++ + col = 0 + } + + return { column: col, line } +} + +export function inputVisualHeight(value: string, columns: number) { + return cursorLayout(value, value.length, columns).line + 1 +} + +export function stableComposerColumns(totalCols: number, promptWidth: number) { + // totalCols is the terminal width. 
Reserve: + // - outer composer paddingX={1}: 2 columns + // - transcript scrollbar gutter + marginLeft: 2 columns + // - prompt prefix width + return Math.max(20, totalCols - promptWidth - 4) +} diff --git a/ui-tui/src/lib/viewportStore.ts b/ui-tui/src/lib/viewportStore.ts new file mode 100644 index 00000000..30028454 --- /dev/null +++ b/ui-tui/src/lib/viewportStore.ts @@ -0,0 +1,59 @@ +import type { RefObject } from 'react' +import { useCallback, useSyncExternalStore } from 'react' + +import type { ScrollBoxHandle } from '@hermes/ink' + +export interface ViewportSnapshot { + atBottom: boolean + bottom: number + pending: number + scrollHeight: number + top: number + viewportHeight: number +} + +const EMPTY: ViewportSnapshot = { + atBottom: true, + bottom: 0, + pending: 0, + scrollHeight: 0, + top: 0, + viewportHeight: 0 +} + +export function getViewportSnapshot(s?: ScrollBoxHandle | null): ViewportSnapshot { + if (!s) { + return EMPTY + } + + const pending = s.getPendingDelta() + const top = Math.max(0, s.getScrollTop() + pending) + const viewportHeight = Math.max(0, s.getViewportHeight()) + const scrollHeight = Math.max(viewportHeight, s.getScrollHeight()) + const bottom = top + viewportHeight + + return { + atBottom: s.isSticky() || bottom >= scrollHeight - 2, + bottom, + pending, + scrollHeight, + top, + viewportHeight + } +} + +export function viewportSnapshotKey(v: ViewportSnapshot) { + return `${v.atBottom ? 1 : 0}:${v.top}:${v.viewportHeight}:${v.scrollHeight}:${v.pending}` +} + +export function useViewportSnapshot(scrollRef: RefObject): ViewportSnapshot { + const key = useSyncExternalStore( + useCallback((cb: () => void) => scrollRef.current?.subscribe(cb) ?? 
(() => {}), [scrollRef]), + () => viewportSnapshotKey(getViewportSnapshot(scrollRef.current)), + () => viewportSnapshotKey(EMPTY) + ) + + void key + + return getViewportSnapshot(scrollRef.current) +} diff --git a/ui-tui/src/types/hermes-ink.d.ts b/ui-tui/src/types/hermes-ink.d.ts index 507be85a..344833ba 100644 --- a/ui-tui/src/types/hermes-ink.d.ts +++ b/ui-tui/src/types/hermes-ink.d.ts @@ -59,6 +59,7 @@ declare module '@hermes/ink' { readonly getViewportTop: () => number readonly isSticky: () => boolean readonly subscribe: (listener: () => void) => () => void + readonly setClampBounds: (min: number | undefined, max: number | undefined) => void } export const Box: React.ComponentType From 14fcff60c93d8c2564f6c859a4a76beaf1da6515 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 01:48:10 -0500 Subject: [PATCH 02/87] style(tui): apply formatter --- ui-tui/src/__tests__/createSlashHandler.test.ts | 4 +--- ui-tui/src/__tests__/viewportStore.test.ts | 9 ++++++++- ui-tui/src/components/appChrome.tsx | 2 +- ui-tui/src/lib/viewportStore.ts | 3 +-- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/ui-tui/src/__tests__/createSlashHandler.test.ts b/ui-tui/src/__tests__/createSlashHandler.test.ts index 4bd35031..5d63d0ad 100644 --- a/ui-tui/src/__tests__/createSlashHandler.test.ts +++ b/ui-tui/src/__tests__/createSlashHandler.test.ts @@ -311,9 +311,7 @@ describe('createSlashHandler', () => { expect(rpc).toHaveBeenCalledWith('session.save', { session_id: 'sid-abc' }) await vi.waitFor(() => { - expect(ctx.transcript.sys).toHaveBeenCalledWith( - 'conversation saved to: /tmp/hermes_conversation_test.json' - ) + expect(ctx.transcript.sys).toHaveBeenCalledWith('conversation saved to: /tmp/hermes_conversation_test.json') }) }) diff --git a/ui-tui/src/__tests__/viewportStore.test.ts b/ui-tui/src/__tests__/viewportStore.test.ts index 671ef9cf..1b3a67a9 100644 --- a/ui-tui/src/__tests__/viewportStore.test.ts +++ 
b/ui-tui/src/__tests__/viewportStore.test.ts @@ -25,7 +25,14 @@ describe('viewportStore', () => { const snap = getViewportSnapshot(handle as any) - expect(snap).toMatchObject({ atBottom: false, bottom: 18, pending: 3, scrollHeight: 40, top: 13, viewportHeight: 5 }) + expect(snap).toMatchObject({ + atBottom: false, + bottom: 18, + pending: 3, + scrollHeight: 40, + top: 13, + viewportHeight: 5 + }) expect(viewportSnapshotKey(snap)).toBe('0:13:5:40:3') }) }) diff --git a/ui-tui/src/components/appChrome.tsx b/ui-tui/src/components/appChrome.tsx index 6085df8a..f03e0f5a 100644 --- a/ui-tui/src/components/appChrome.tsx +++ b/ui-tui/src/components/appChrome.tsx @@ -9,8 +9,8 @@ import { VERBS } from '../content/verbs.js' import { fmtDuration } from '../domain/messages.js' import { stickyPromptFromViewport } from '../domain/viewport.js' import { buildSubagentTree, treeTotals, widthByDepth } from '../lib/subagentTree.js' -import { useViewportSnapshot } from '../lib/viewportStore.js' import { fmtK } from '../lib/text.js' +import { useViewportSnapshot } from '../lib/viewportStore.js' import type { Theme } from '../theme.js' import type { Msg, Usage } from '../types.js' diff --git a/ui-tui/src/lib/viewportStore.ts b/ui-tui/src/lib/viewportStore.ts index 30028454..298d094b 100644 --- a/ui-tui/src/lib/viewportStore.ts +++ b/ui-tui/src/lib/viewportStore.ts @@ -1,8 +1,7 @@ +import type { ScrollBoxHandle } from '@hermes/ink' import type { RefObject } from 'react' import { useCallback, useSyncExternalStore } from 'react' -import type { ScrollBoxHandle } from '@hermes/ink' - export interface ViewportSnapshot { atBottom: boolean bottom: number From 458ce792d24e98baa65b5c03821fded93ba813de Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 02:15:10 -0500 Subject: [PATCH 03/87] fix(tui): persist model switches by default --- tests/test_tui_gateway_server.py | 24 +++++++ tui_gateway/server.py | 72 +++++++++++++++++++ .../src/__tests__/createSlashHandler.test.ts | 30 
++++++++ ui-tui/src/app/slash/commands/session.ts | 36 ++++++---- ui-tui/src/app/useMainApp.ts | 2 +- 5 files changed, 150 insertions(+), 14 deletions(-) diff --git a/tests/test_tui_gateway_server.py b/tests/test_tui_gateway_server.py index f7eacb68..2639d802 100644 --- a/tests/test_tui_gateway_server.py +++ b/tests/test_tui_gateway_server.py @@ -347,6 +347,30 @@ def test_complete_slash_includes_provider_alias(): assert any(item["text"] == "provider" for item in resp["result"]["items"]) +def test_complete_slash_includes_tui_details_command(): + resp = server.handle_request( + {"id": "1", "method": "complete.slash", "params": {"text": "/det"}} + ) + + assert any(item["text"] == "/details" for item in resp["result"]["items"]) + + +def test_complete_slash_details_args(): + resp_section = server.handle_request( + {"id": "1", "method": "complete.slash", "params": {"text": "/details t"}} + ) + resp_mode = server.handle_request( + { + "id": "2", + "method": "complete.slash", + "params": {"text": "/details thinking e"}, + } + ) + + assert any(item["text"] == "thinking" for item in resp_section["result"]["items"]) + assert any(item["text"] == "expanded" for item in resp_mode["result"]["items"]) + + def test_config_set_reasoning_updates_live_session_and_agent(tmp_path, monkeypatch): monkeypatch.setattr(server, "_hermes_home", tmp_path) agent = types.SimpleNamespace(reasoning_config=None) diff --git a/tui_gateway/server.py b/tui_gateway/server.py index 03631bf1..b0b379d0 100644 --- a/tui_gateway/server.py +++ b/tui_gateway/server.py @@ -3710,6 +3710,65 @@ def _(rid, params: dict) -> dict: return _ok(rid, {"items": items}) +def _details_completion_item(value: str, meta: str = "") -> dict: + return {"text": value, "display": value, "meta": meta} + + +def _details_completions(text: str) -> list[dict] | None: + if not text.lower().startswith("/details"): + return None + + stripped = text.strip() + if stripped and not "/details".startswith(stripped.lower().split()[0]): + return 
None + + body = text[len("/details"):] + if body.startswith(" "): + body = body[1:] + parts = body.split() + has_trailing_space = text.endswith(" ") + sections = ("thinking", "tools", "subagents", "activity") + modes = ("hidden", "collapsed", "expanded") + + if not body or (len(parts) == 0 and has_trailing_space): + return [ + *[_details_completion_item(mode, "global mode") for mode in modes], + _details_completion_item("cycle", "cycle global mode"), + *[_details_completion_item(section, "section override") for section in sections], + ] + + if len(parts) == 1 and not has_trailing_space: + prefix = parts[0].lower() + candidates = [*modes, "cycle", *sections] + return [ + _details_completion_item( + candidate, + "section override" if candidate in sections else "global mode", + ) + for candidate in candidates + if candidate.startswith(prefix) and candidate != prefix + ] + + if len(parts) == 1 and has_trailing_space and parts[0].lower() in sections: + return [ + *[_details_completion_item(mode, f"set {parts[0].lower()}") for mode in modes], + _details_completion_item("reset", f"clear {parts[0].lower()} override"), + ] + + if len(parts) == 2 and not has_trailing_space and parts[0].lower() in sections: + prefix = parts[1].lower() + return [ + _details_completion_item( + candidate, + f"clear {parts[0].lower()} override" if candidate == "reset" else f"set {parts[0].lower()}", + ) + for candidate in (*modes, "reset") + if candidate.startswith(prefix) and candidate != prefix + ] + + return [] + + @method("complete.slash") def _(rid, params: dict) -> dict: text = params.get("text", "") @@ -3742,6 +3801,11 @@ def _(rid, params: dict) -> dict: "display": "/compact", "meta": "Toggle compact display mode", }, + { + "text": "/details", + "display": "/details", + "meta": "Control agent detail visibility", + }, { "text": "/logs", "display": "/logs", @@ -3753,6 +3817,14 @@ def _(rid, params: dict) -> dict: item["text"] == extra["text"] for item in items ): items.append(extra) + + 
details_items = _details_completions(text) + if details_items is not None: + return _ok( + rid, + {"items": details_items, "replace_from": text.rfind(" ") + 1}, + ) + return _ok( rid, {"items": items, "replace_from": text.rfind(" ") + 1 if " " in text else 1}, diff --git a/ui-tui/src/__tests__/createSlashHandler.test.ts b/ui-tui/src/__tests__/createSlashHandler.test.ts index 5d63d0ad..32c92c00 100644 --- a/ui-tui/src/__tests__/createSlashHandler.test.ts +++ b/ui-tui/src/__tests__/createSlashHandler.test.ts @@ -25,6 +25,36 @@ describe('createSlashHandler', () => { expect(ctx.gateway.gw.request).not.toHaveBeenCalled() }) + it('persists typed /model switches by default', async () => { + patchUiState({ sid: 'sid-abc' }) + + const ctx = buildCtx({ + gateway: { + ...buildGateway(), + rpc: vi.fn(() => Promise.resolve({ value: 'x-model' })) + } + }) + + expect(createSlashHandler(ctx)('/model x-model')).toBe(true) + expect(ctx.gateway.rpc).toHaveBeenCalledWith('config.set', { + key: 'model', + session_id: 'sid-abc', + value: 'x-model --global' + }) + }) + + it('does not duplicate --global for explicit persistent model switches', () => { + patchUiState({ sid: 'sid-abc' }) + const ctx = buildCtx() + + createSlashHandler(ctx)('/model x-model --global') + expect(ctx.gateway.rpc).toHaveBeenCalledWith('config.set', { + key: 'model', + session_id: 'sid-abc', + value: 'x-model --global' + }) + }) + it('opens the skills hub locally for bare /skills', () => { const ctx = buildCtx() diff --git a/ui-tui/src/app/slash/commands/session.ts b/ui-tui/src/app/slash/commands/session.ts index 1049ee34..7cb7fcf8 100644 --- a/ui-tui/src/app/slash/commands/session.ts +++ b/ui-tui/src/app/slash/commands/session.ts @@ -16,6 +16,14 @@ import { patchOverlayState } from '../../overlayStore.js' import { patchUiState } from '../../uiStore.js' import type { SlashCommand } from '../types.js' +const GLOBAL_MODEL_FLAG_RE = /(?:^|\s)--global(?:\s|$)/ + +const persistedModelArg = (arg: string) => { + const 
trimmed = arg.trim() + + return GLOBAL_MODEL_FLAG_RE.test(trimmed) ? trimmed : `${trimmed} --global` +} + export const sessionCommands: SlashCommand[] = [ { aliases: ['bg'], @@ -69,21 +77,23 @@ export const sessionCommands: SlashCommand[] = [ return patchOverlayState({ modelPicker: true }) } - ctx.gateway.rpc('config.set', { key: 'model', session_id: ctx.sid, value: arg.trim() }).then( - ctx.guarded(r => { - if (!r.value) { - return ctx.transcript.sys('error: invalid response: model switch') - } + ctx.gateway + .rpc('config.set', { key: 'model', session_id: ctx.sid, value: persistedModelArg(arg) }) + .then( + ctx.guarded(r => { + if (!r.value) { + return ctx.transcript.sys('error: invalid response: model switch') + } - ctx.transcript.sys(`model → ${r.value}`) - ctx.local.maybeWarn(r) + ctx.transcript.sys(`model → ${r.value}`) + ctx.local.maybeWarn(r) - patchUiState(state => ({ - ...state, - info: state.info ? { ...state.info, model: r.value! } : { model: r.value!, skills: {}, tools: {} } - })) - }) - ) + patchUiState(state => ({ + ...state, + info: state.info ? { ...state.info, model: r.value! 
} : { model: r.value!, skills: {}, tools: {} } + })) + }) + ) } }, diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 31f228eb..7d87be11 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -627,7 +627,7 @@ export function useMainApp(gw: GatewayClient) { const onModelSelect = useCallback((value: string) => { patchOverlayState({ modelPicker: false }) - slashRef.current(`/model ${value}`) + slashRef.current(`/model ${value} --global`) }, []) const hasReasoning = Boolean(turn.reasoning.trim()) From 19d75d1797510072ee19e9db12926781ee98ccd9 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 02:21:22 -0500 Subject: [PATCH 04/87] perf(tui): coalesce composer echo updates --- ui-tui/src/components/textInput.tsx | 69 ++++++++++++++++++++++++++--- 1 file changed, 63 insertions(+), 6 deletions(-) diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 39e8379b..263857a0 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -303,6 +303,8 @@ export function TextInput({ const pasteTimer = useRef | null>(null) const pastePos = useRef(0) const editVersionRef = useRef(0) + const parentChangeTimer = useRef | null>(null) + const pendingParentValue = useRef(null) const undo = useRef<{ cursor: number; value: string }[]>([]) const redo = useRef<{ cursor: number; value: string }[]>([]) @@ -385,11 +387,40 @@ export function TextInput({ if (pasteTimer.current) { clearTimeout(pasteTimer.current) } + + if (parentChangeTimer.current) { + clearTimeout(parentChangeTimer.current) + } }, [] ) - const commit = (next: string, nextCur: number, track = true) => { + const flushParentChange = () => { + if (parentChangeTimer.current) { + clearTimeout(parentChangeTimer.current) + parentChangeTimer.current = null + } + + const next = pendingParentValue.current + pendingParentValue.current = null + + if (next !== null) { + self.current = true + 
cbChange.current(next) + } + } + + const scheduleParentChange = (next: string) => { + pendingParentValue.current = next + + if (parentChangeTimer.current) { + return + } + + parentChangeTimer.current = setTimeout(flushParentChange, 16) + } + + const commit = (next: string, nextCur: number, track = true, syncParent = true) => { const prev = vRef.current const c = snapPos(next, nextCur) editVersionRef.current += 1 @@ -414,8 +445,13 @@ export function TextInput({ vRef.current = next if (next !== prev) { - self.current = true - cbChange.current(next) + if (syncParent) { + flushParentChange() + self.current = true + cbChange.current(next) + } else { + scheduleParentChange(next) + } } } @@ -597,9 +633,13 @@ export function TextInput({ } if (k.return) { - k.shift || (isMac ? isActionMod(k) : k.meta) - ? commit(ins(vRef.current, curRef.current, '\n'), curRef.current + 1) - : cbSubmit.current?.(vRef.current) + if (k.shift || (isMac ? isActionMod(k) : k.meta)) { + flushParentChange() + commit(ins(vRef.current, curRef.current, '\n'), curRef.current + 1) + } else { + flushParentChange() + cbSubmit.current?.(vRef.current) + } return } @@ -741,8 +781,25 @@ export function TextInput({ v = v.slice(0, range.start) + text + v.slice(range.end) c = range.start + text.length } else { + const simpleAppend = + focus && + termFocus && + !selected && + !mask && + !placeholder && + c === v.length && + !v.includes('\n') && + stringWidth(text) === text.length && + stringWidth(v) + text.length < Math.max(1, columns) + v = v.slice(0, c) + text + v.slice(c) c += text.length + + if (simpleAppend) { + commit(v, c, true, false) + + return + } } } else { return From 9bb3bc422dcfc38358dd0d406e44e9f2cceb0d68 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 03:07:15 -0500 Subject: [PATCH 05/87] perf(tui): optimistically echo simple input --- ui-tui/src/components/textInput.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git 
a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 263857a0..ff0bb23c 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -10,11 +10,12 @@ import { isActionMod, isMac, isMacActionFallback } from '../lib/platform.js' type InkExt = typeof Ink & { stringWidth: (s: string) => number useDeclaredCursor: (a: { line: number; column: number; active: boolean }) => (el: any) => void + useStdout: () => { stdout?: NodeJS.WriteStream } useTerminalFocus: () => boolean } const ink = Ink as unknown as InkExt -const { Box, Text, useStdin, useInput, stringWidth, useDeclaredCursor, useTerminalFocus } = ink +const { Box, Text, useStdin, useInput, useStdout, stringWidth, useDeclaredCursor, useTerminalFocus } = ink const ESC = '\x1b' const INV = `${ESC}[7m` @@ -293,6 +294,7 @@ export function TextInput({ const [sel, setSel] = useState(null) const fwdDel = useFwdDelete(focus) const termFocus = useTerminalFocus() + const { stdout } = useStdout() const curRef = useRef(cur) const selRef = useRef(null) @@ -787,6 +789,7 @@ export function TextInput({ !selected && !mask && !placeholder && + !!stdout?.isTTY && c === v.length && !v.includes('\n') && stringWidth(text) === text.length && @@ -796,6 +799,7 @@ export function TextInput({ c += text.length if (simpleAppend) { + stdout!.write(text) commit(v, c, true, false) return From 5cd41d2b3b1d5b464865a948951d4327a6a42b70 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 03:22:50 -0500 Subject: [PATCH 06/87] perf(tui): widen native input echo --- ui-tui/src/components/textInput.tsx | 48 ++++++++++++++++++++++------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index ff0bb23c..830599c7 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -307,6 +307,7 @@ export function TextInput({ const editVersionRef = useRef(0) const 
parentChangeTimer = useRef | null>(null) const pendingParentValue = useRef(null) + const lineWidthRef = useRef(stringWidth(value.includes('\n') ? value.slice(value.lastIndexOf('\n') + 1) : value)) const undo = useRef<{ cursor: number; value: string }[]>([]) const redo = useRef<{ cursor: number; value: string }[]>([]) @@ -359,6 +360,7 @@ export function TextInput({ curRef.current = value.length selRef.current = null vRef.current = value + lineWidthRef.current = stringWidth(value.includes('\n') ? value.slice(value.lastIndexOf('\n') + 1) : value) undo.current = [] redo.current = [] } @@ -422,6 +424,31 @@ export function TextInput({ parentChangeTimer.current = setTimeout(flushParentChange, 16) } + const canFastEchoBase = () => focus && termFocus && !selected && !mask && !!stdout?.isTTY + + const canFastAppend = (current: string, cursor: number, text: string) => { + const sw = stringWidth(text) + + return ( + canFastEchoBase() && + cursor === current.length && + current.length > 0 && + !current.includes('\n') && + sw === text.length && + lineWidthRef.current + sw < Math.max(1, columns) + ) + } + + const canFastBackspace = (current: string, cursor: number) => { + if (!canFastEchoBase() || cursor !== current.length || cursor <= 0 || current.includes('\n')) { + return false + } + + const prev = current[cursor - 1] + + return !!prev && stringWidth(prev) === 1 + } + const commit = (next: string, nextCur: number, track = true, syncParent = true) => { const prev = vRef.current const c = snapPos(next, nextCur) @@ -445,6 +472,7 @@ export function TextInput({ setCur(c) curRef.current = c vRef.current = next + lineWidthRef.current = stringWidth(next.includes('\n') ? 
next.slice(next.lastIndexOf('\n') + 1) : next) if (next !== prev) { if (syncParent) { @@ -706,6 +734,14 @@ export function TextInput({ const t = wordLeft(v, c) v = v.slice(0, t) + v.slice(c) c = t + } else if (canFastBackspace(v, c)) { + const t = prevPos(v, c) + v = v.slice(0, t) + v.slice(c) + c = t + stdout!.write('\b \b') + commit(v, c, true, false) + + return } else { const t = prevPos(v, c) v = v.slice(0, t) + v.slice(c) @@ -783,17 +819,7 @@ export function TextInput({ v = v.slice(0, range.start) + text + v.slice(range.end) c = range.start + text.length } else { - const simpleAppend = - focus && - termFocus && - !selected && - !mask && - !placeholder && - !!stdout?.isTTY && - c === v.length && - !v.includes('\n') && - stringWidth(text) === text.length && - stringWidth(v) + text.length < Math.max(1, columns) + const simpleAppend = canFastAppend(v, c, text) v = v.slice(0, c) + text + v.slice(c) c += text.length From ee7ef33b02f0163b63d0fe8600163b2be740e08a Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 03:27:45 -0500 Subject: [PATCH 07/87] fix(tui): queue busy submissions gracefully --- ui-tui/src/app/turnController.ts | 13 +++++++++-- ui-tui/src/app/useSubmission.ts | 39 ++++++++++++++++++++++++++++++-- ui-tui/src/config/timing.ts | 3 +++ 3 files changed, 51 insertions(+), 4 deletions(-) diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 1041b4d4..90c4ac12 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -1,4 +1,4 @@ -import { REASONING_PULSE_MS, STREAM_BATCH_MS } from '../config/timing.js' +import { REASONING_PULSE_MS, STREAM_BATCH_MS, STREAM_IDLE_BATCH_MS, STREAM_TYPING_BATCH_MS } from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' import { @@ -75,8 +75,17 @@ class TurnController { private reasoningStreamingTimer: Timer = null 
private reasoningTimer: Timer = null private streamTimer: Timer = null + private streamDelay = STREAM_IDLE_BATCH_MS private toolProgressTimer: Timer = null + boostStreamingForTyping() { + this.streamDelay = STREAM_TYPING_BATCH_MS + } + + relaxStreaming() { + this.streamDelay = STREAM_IDLE_BATCH_MS + } + clearReasoning() { this.reasoningTimer = clear(this.reasoningTimer) this.reasoningText = '' @@ -493,7 +502,7 @@ class TurnController { const raw = this.bufRef.trimStart() const visible = hasReasoningTag(raw) ? splitReasoning(raw).text : raw patchTurnState({ streaming: visible }) - }, STREAM_BATCH_MS) + }, this.streamDelay) } startMessage() { diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index f09dc363..9bca6581 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -1,5 +1,6 @@ -import { type MutableRefObject, useCallback, useRef } from 'react' +import { type MutableRefObject, useCallback, useEffect, useRef } from 'react' +import { TYPING_IDLE_MS } from '../config/timing.js' import { attachedImageNotice } from '../domain/messages.js' import { looksLikeSlashCommand } from '../domain/slash.js' import type { GatewayClient } from '../gatewayClient.js' @@ -14,6 +15,9 @@ import { turnController } from './turnController.js' import { getUiState, patchUiState } from './uiStore.js' const DOUBLE_ENTER_MS = 450 +const SESSION_BUSY_RE = /session busy|waiting for model response/i + +const isSessionBusyError = (e: unknown) => e instanceof Error && SESSION_BUSY_RE.test(e.message) const expandSnips = (snips: PasteSnippet[]) => { const byLabel = new Map() @@ -44,6 +48,30 @@ export function useSubmission(opts: UseSubmissionOptions) { } = opts const lastEmptyAt = useRef(0) + const typingIdleTimer = useRef | null>(null) + + useEffect(() => { + if (composerState.input || composerState.inputBuf.length) { + if (getUiState().busy) { + turnController.boostStreamingForTyping() + } + + if (typingIdleTimer.current) { + 
clearTimeout(typingIdleTimer.current) + } + + typingIdleTimer.current = setTimeout(() => { + typingIdleTimer.current = null + turnController.relaxStreaming() + }, TYPING_IDLE_MS) + } + + return () => { + if (typingIdleTimer.current) { + clearTimeout(typingIdleTimer.current) + } + } + }, [composerState.input, composerState.inputBuf]) const send = useCallback( (text: string) => { @@ -65,6 +93,13 @@ export function useSubmission(opts: UseSubmissionOptions) { turnController.interrupted = false gw.request('prompt.submit', { session_id: sid, text: submitText }).catch((e: Error) => { + if (isSessionBusyError(e)) { + composerActions.enqueue(text) + patchUiState({ busy: true, status: 'queued for next turn' }) + + return sys(`queued: "${text.slice(0, 50)}${text.length > 50 ? '…' : ''}"`) + } + sys(`error: ${e.message}`) patchUiState({ busy: false, status: 'ready' }) }) @@ -92,7 +127,7 @@ export function useSubmission(opts: UseSubmissionOptions) { }) .catch(() => startSubmit(text, expand(text))) }, - [appendMessage, composerState.pasteSnips, gw, maybeGoodVibes, setLastUserMsg, sys] + [appendMessage, composerActions, composerState.pasteSnips, gw, maybeGoodVibes, setLastUserMsg, sys] ) const shellExec = useCallback( diff --git a/ui-tui/src/config/timing.ts b/ui-tui/src/config/timing.ts index 63498dba..8fdf6b5f 100644 --- a/ui-tui/src/config/timing.ts +++ b/ui-tui/src/config/timing.ts @@ -1,2 +1,5 @@ export const STREAM_BATCH_MS = 16 +export const STREAM_IDLE_BATCH_MS = 16 +export const STREAM_TYPING_BATCH_MS = 80 +export const TYPING_IDLE_MS = 120 export const REASONING_PULSE_MS = 700 From cd7c5e5606bb583eb9c2ebc4bcb23dd78be043e3 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 03:38:56 -0500 Subject: [PATCH 08/87] perf(tui): defer local input render during echo --- ui-tui/src/components/textInput.tsx | 36 +++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/ui-tui/src/components/textInput.tsx 
b/ui-tui/src/components/textInput.tsx index 830599c7..66d212fb 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -307,6 +307,7 @@ export function TextInput({ const editVersionRef = useRef(0) const parentChangeTimer = useRef | null>(null) const pendingParentValue = useRef(null) + const localRenderTimer = useRef | null>(null) const lineWidthRef = useRef(stringWidth(value.includes('\n') ? value.slice(value.lastIndexOf('\n') + 1) : value)) const undo = useRef<{ cursor: number; value: string }[]>([]) const redo = useRef<{ cursor: number; value: string }[]>([]) @@ -395,6 +396,10 @@ export function TextInput({ if (parentChangeTimer.current) { clearTimeout(parentChangeTimer.current) } + + if (localRenderTimer.current) { + clearTimeout(localRenderTimer.current) + } }, [] ) @@ -424,6 +429,23 @@ export function TextInput({ parentChangeTimer.current = setTimeout(flushParentChange, 16) } + const flushLocalRender = () => { + if (localRenderTimer.current) { + clearTimeout(localRenderTimer.current) + localRenderTimer.current = null + } + + setCur(curRef.current) + } + + const scheduleLocalRender = () => { + if (localRenderTimer.current) { + return + } + + localRenderTimer.current = setTimeout(flushLocalRender, 16) + } + const canFastEchoBase = () => focus && termFocus && !selected && !mask && !!stdout?.isTTY const canFastAppend = (current: string, cursor: number, text: string) => { @@ -449,7 +471,7 @@ export function TextInput({ return !!prev && stringWidth(prev) === 1 } - const commit = (next: string, nextCur: number, track = true, syncParent = true) => { + const commit = (next: string, nextCur: number, track = true, syncParent = true, syncLocal = true) => { const prev = vRef.current const c = snapPos(next, nextCur) editVersionRef.current += 1 @@ -469,7 +491,13 @@ export function TextInput({ redo.current = [] } - setCur(c) + if (syncLocal) { + flushLocalRender() + setCur(c) + } else { + scheduleLocalRender() + } + curRef.current = c 
vRef.current = next lineWidthRef.current = stringWidth(next.includes('\n') ? next.slice(next.lastIndexOf('\n') + 1) : next) @@ -739,7 +767,7 @@ export function TextInput({ v = v.slice(0, t) + v.slice(c) c = t stdout!.write('\b \b') - commit(v, c, true, false) + commit(v, c, true, false, false) return } else { @@ -826,7 +854,7 @@ export function TextInput({ if (simpleAppend) { stdout!.write(text) - commit(v, c, true, false) + commit(v, c, true, false, false) return } From 1c964ed43ff6839f3c7d068cd4a45f4bed0d4cb7 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 03:47:05 -0500 Subject: [PATCH 09/87] fix(tui): rely on native cursor for input --- ui-tui/src/components/textInput.tsx | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 66d212fb..35f1949b 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -336,21 +336,23 @@ export function TextInput({ active: focus && termFocus && !selected }) + const nativeCursor = focus && termFocus && !selected + const rendered = useMemo(() => { if (!focus) { return display || dim(placeholder) } if (!display && placeholder) { - return invert(placeholder[0] ?? ' ') + dim(placeholder.slice(1)) + return nativeCursor ? dim(placeholder) : invert(placeholder[0] ?? ' ') + dim(placeholder.slice(1)) } if (selected) { return renderWithSelection(display, selected.start, selected.end) } - return renderWithCursor(display, cur) - }, [cur, display, focus, placeholder, selected]) + return nativeCursor ? 
display || ' ' : renderWithCursor(display, cur) + }, [cur, display, focus, nativeCursor, placeholder, selected]) useEffect(() => { if (self.current) { From 355e0ae960ec031123e8eee8fdecf2b20a506d4d Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:23:57 -0500 Subject: [PATCH 10/87] fix(tui): keep streaming progress stable during interaction --- .../hermes-ink/src/ink/components/App.tsx | 24 +++---- .../src/ink/components/ScrollBox.tsx | 30 +++++---- ui-tui/src/__tests__/interactionMode.test.ts | 28 +++++++++ ui-tui/src/__tests__/scroll.test.ts | 53 ++++++++++++++++ .../src/__tests__/virtualHistoryClamp.test.ts | 13 ++++ ui-tui/src/app/interactionMode.ts | 52 ++++++++++++++++ ui-tui/src/app/interfaces.ts | 4 ++ ui-tui/src/app/scroll.ts | 58 +++++++++++++++++ ui-tui/src/app/turnController.ts | 14 ++++- ui-tui/src/app/useMainApp.ts | 62 +++---------------- ui-tui/src/app/useSubmission.ts | 19 +----- ui-tui/src/components/textInput.tsx | 2 +- ui-tui/src/config/limits.ts | 2 +- ui-tui/src/config/timing.ts | 6 +- ui-tui/src/hooks/useVirtualHistory.ts | 17 ++++- 15 files changed, 278 insertions(+), 106 deletions(-) create mode 100644 ui-tui/src/__tests__/interactionMode.test.ts create mode 100644 ui-tui/src/__tests__/scroll.test.ts create mode 100644 ui-tui/src/__tests__/virtualHistoryClamp.test.ts create mode 100644 ui-tui/src/app/interactionMode.ts create mode 100644 ui-tui/src/app/scroll.ts diff --git a/ui-tui/packages/hermes-ink/src/ink/components/App.tsx b/ui-tui/packages/hermes-ink/src/ink/components/App.tsx index 7805b4f9..64c181a0 100644 --- a/ui-tui/packages/hermes-ink/src/ink/components/App.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/components/App.tsx @@ -29,7 +29,7 @@ import { FOCUS_IN, FOCUS_OUT } from '../termio/csi.js' -import { DBP, DFE, DISABLE_MOUSE_TRACKING, EBP, EFE, HIDE_CURSOR, SHOW_CURSOR } from '../termio/dec.js' +import { DBP, DFE, DISABLE_MOUSE_TRACKING, EBP, EFE, SHOW_CURSOR } from '../termio/dec.js' import AppContext 
from './AppContext.js' import { ClockProvider } from './ClockContext.js' @@ -206,10 +206,9 @@ export default class App extends PureComponent { ) } override componentDidMount() { - // In accessibility mode, keep the native cursor visible for screen magnifiers and other tools - if (this.props.stdout.isTTY) { - this.props.stdout.write(HIDE_CURSOR) - } + // Keep the native terminal cursor visible. Ink parks it at the declared + // input caret after each frame, so the terminal emulator provides the + // normal blinking block/bar without React-driven blink re-renders. } override componentWillUnmount() { if (this.props.stdout.isTTY) { @@ -470,7 +469,7 @@ export default class App extends PureComponent { } if (this.props.stdout.isTTY) { - this.props.stdout.write(HIDE_CURSOR + EFE) + this.props.stdout.write(EFE) } this.inputEmitter.emit('resume') @@ -569,18 +568,19 @@ function processKeysInBatch(app: App, items: ParsedInput[], _unused1: undefined, /** Exported for testing. Mutates app.props.selection and click/hover state. */ export function handleMouseEvent(app: App, m: ParsedMouse): void { - // Allow disabling click handling while keeping wheel scroll (which goes - // through the keybinding system as 'wheelup'/'wheeldown', not here). - if (isMouseClicksDisabled()) { - return - } - const sel = app.props.selection // Terminal coords are 1-indexed; screen buffer is 0-indexed const col = m.col - 1 const row = m.row - 1 const baseButton = m.button & 0x03 + // Allow disabling app click/selection handling while keeping wheel scroll + // and DOM mouse dispatch alive. Put this after coordinate/button decoding + // and exempt non-left buttons so scrollbar/right-click handlers still work. 
+ if (isMouseClicksDisabled() && baseButton === 0) { + return + } + if (m.action === 'press') { if ((m.button & 0x20) !== 0 && baseButton === 3) { if (app.mouseCaptureTarget) { diff --git a/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx b/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx index ed4239ce..38f04b4f 100644 --- a/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx @@ -122,6 +122,19 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< }) } + const scrollByNow = (dy: number) => { + const el = domRef.current + + if (!el) { + return + } + + el.stickyScroll = false + el.scrollAnchor = undefined + el.pendingScrollDelta = (el.pendingScrollDelta ?? 0) + Math.floor(dy) + scrollMutated(el) + } + useImperativeHandle( ref, (): ScrollBoxHandle => ({ @@ -155,22 +168,7 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< } scrollMutated(box) }, - scrollBy(dy: number) { - const el = domRef.current - - if (!el) { - return - } - - el.stickyScroll = false - // Wheel input cancels any in-flight anchor seek — user override. - el.scrollAnchor = undefined - // Accumulate in pendingScrollDelta; renderer drains it at a capped - // rate so fast flicks show intermediate frames. Pure accumulator: - // scroll-up followed by scroll-down naturally cancels. - el.pendingScrollDelta = (el.pendingScrollDelta ?? 
0) + Math.floor(dy) - scrollMutated(el) - }, + scrollBy: scrollByNow, scrollToBottom() { const el = domRef.current diff --git a/ui-tui/src/__tests__/interactionMode.test.ts b/ui-tui/src/__tests__/interactionMode.test.ts new file mode 100644 index 00000000..1a44519d --- /dev/null +++ b/ui-tui/src/__tests__/interactionMode.test.ts @@ -0,0 +1,28 @@ +import { afterEach, describe, expect, it, vi } from 'vitest' + +import { getInteractionMode, markScrolling, markTyping, resetInteractionMode } from '../app/interactionMode.js' +import { SCROLLING_IDLE_MS, TYPING_IDLE_MS } from '../config/timing.js' + +describe('interactionMode', () => { + afterEach(() => { + resetInteractionMode() + vi.useRealTimers() + }) + + it('holds scrolling mode briefly then returns idle', () => { + vi.useFakeTimers() + markScrolling() + expect(getInteractionMode()).toBe('scrolling') + vi.advanceTimersByTime(SCROLLING_IDLE_MS) + expect(getInteractionMode()).toBe('idle') + }) + + it('typing takes priority over scrolling', () => { + vi.useFakeTimers() + markTyping() + markScrolling() + expect(getInteractionMode()).toBe('typing') + vi.advanceTimersByTime(TYPING_IDLE_MS) + expect(getInteractionMode()).toBe('idle') + }) +}) diff --git a/ui-tui/src/__tests__/scroll.test.ts b/ui-tui/src/__tests__/scroll.test.ts new file mode 100644 index 00000000..22f5d3f1 --- /dev/null +++ b/ui-tui/src/__tests__/scroll.test.ts @@ -0,0 +1,53 @@ +import { describe, expect, it, vi } from 'vitest' + +import { scrollWithSelectionBy } from '../app/scroll.js' + +function makeScroll(overrides: Partial> = {}) { + return { + getPendingDelta: vi.fn(() => 0), + getScrollHeight: vi.fn(() => 100), + getScrollTop: vi.fn(() => 10), + getViewportHeight: vi.fn(() => 20), + getViewportTop: vi.fn(() => 0), + scrollBy: vi.fn(), + ...overrides + } +} + +describe('scrollWithSelectionBy', () => { + it('clamps to the actual remaining scroll distance before calling scrollBy', () => { + const s = makeScroll({ + getScrollHeight: vi.fn(() => 30), + 
getScrollTop: vi.fn(() => 9), + getViewportHeight: vi.fn(() => 20) + }) + const selection = { + captureScrolledRows: vi.fn(), + getState: vi.fn(() => null), + shiftAnchor: vi.fn(), + shiftSelection: vi.fn() + } + + scrollWithSelectionBy(10, { scrollRef: { current: s as never }, selection }) + + expect(s.scrollBy).toHaveBeenCalledWith(1) + }) + + it('does nothing at the edge instead of queueing dead pending deltas', () => { + const s = makeScroll({ + getScrollHeight: vi.fn(() => 30), + getScrollTop: vi.fn(() => 10), + getViewportHeight: vi.fn(() => 20) + }) + const selection = { + captureScrolledRows: vi.fn(), + getState: vi.fn(() => null), + shiftAnchor: vi.fn(), + shiftSelection: vi.fn() + } + + scrollWithSelectionBy(10, { scrollRef: { current: s as never }, selection }) + + expect(s.scrollBy).not.toHaveBeenCalled() + }) +}) diff --git a/ui-tui/src/__tests__/virtualHistoryClamp.test.ts b/ui-tui/src/__tests__/virtualHistoryClamp.test.ts new file mode 100644 index 00000000..255fad7c --- /dev/null +++ b/ui-tui/src/__tests__/virtualHistoryClamp.test.ts @@ -0,0 +1,13 @@ +import { describe, expect, it } from 'vitest' + +import { shouldSetVirtualClamp } from '../hooks/useVirtualHistory.js' + +describe('virtual history clamp bounds', () => { + it('does not clamp sticky live tail content', () => { + expect(shouldSetVirtualClamp({ itemCount: 20, sticky: true, viewportHeight: 10 })).toBe(false) + }) + + it('sets clamp bounds after manual scroll breaks sticky mode', () => { + expect(shouldSetVirtualClamp({ itemCount: 20, sticky: false, viewportHeight: 10 })).toBe(true) + }) +}) diff --git a/ui-tui/src/app/interactionMode.ts b/ui-tui/src/app/interactionMode.ts new file mode 100644 index 00000000..f18033f8 --- /dev/null +++ b/ui-tui/src/app/interactionMode.ts @@ -0,0 +1,52 @@ +import { SCROLLING_IDLE_MS, TYPING_IDLE_MS } from '../config/timing.js' + +export type InteractionMode = 'idle' | 'scrolling' | 'typing' + +type Timer = null | ReturnType + +let mode: InteractionMode = 
'idle' +let scrollingTimer: Timer = null +let typingTimer: Timer = null + +const clear = (t: Timer): null => { + if (t) { + clearTimeout(t) + } + + return null +} + +export function getInteractionMode(): InteractionMode { + return mode +} + +export function markTyping(): void { + mode = 'typing' + typingTimer = clear(typingTimer) + scrollingTimer = clear(scrollingTimer) + typingTimer = setTimeout(() => { + typingTimer = null + mode = 'idle' + }, TYPING_IDLE_MS) +} + +export function markScrolling(): void { + if (mode === 'typing') { + return + } + + mode = 'scrolling' + scrollingTimer = clear(scrollingTimer) + scrollingTimer = setTimeout(() => { + scrollingTimer = null + if (mode === 'scrolling') { + mode = 'idle' + } + }, SCROLLING_IDLE_MS) +} + +export function resetInteractionMode(): void { + scrollingTimer = clear(scrollingTimer) + typingTimer = clear(typingTimer) + mode = 'idle' +} diff --git a/ui-tui/src/app/interfaces.ts b/ui-tui/src/app/interfaces.ts index 9049c17f..032eee87 100644 --- a/ui-tui/src/app/interfaces.ts +++ b/ui-tui/src/app/interfaces.ts @@ -31,8 +31,12 @@ export interface StateSetter { export type StatusBarMode = 'bottom' | 'off' | 'top' export interface SelectionApi { + captureScrolledRows: (firstRow: number, lastRow: number, side: 'above' | 'below') => void clearSelection: () => void copySelection: () => string + getState: () => unknown + shiftAnchor: (dRow: number, minRow: number, maxRow: number) => void + shiftSelection: (dRow: number, minRow: number, maxRow: number) => void } export interface CompletionItem { diff --git a/ui-tui/src/app/scroll.ts b/ui-tui/src/app/scroll.ts new file mode 100644 index 00000000..2572e280 --- /dev/null +++ b/ui-tui/src/app/scroll.ts @@ -0,0 +1,58 @@ +import type { ScrollBoxHandle } from '@hermes/ink' + +import type { SelectionApi } from './interfaces.js' +import { markScrolling } from './interactionMode.js' + +export interface SelectionSnap { + anchor?: { row: number } | null + focus?: { row: number } | null 
+ isDragging?: boolean +} + +export interface ScrollWithSelectionOptions { + readonly scrollRef: { readonly current: ScrollBoxHandle | null } + readonly selection: SelectionApi +} + +export function scrollWithSelectionBy(delta: number, { scrollRef, selection }: ScrollWithSelectionOptions): void { + const s = scrollRef.current + + if (!s) { + return + } + + const cur = s.getScrollTop() + s.getPendingDelta() + const viewport = Math.max(0, s.getViewportHeight()) + const max = Math.max(0, s.getScrollHeight() - viewport) + const actual = Math.max(0, Math.min(max, cur + delta)) - cur + + if (actual === 0) { + return + } + + markScrolling() + + const sel = selection.getState() as null | SelectionSnap + const top = s.getViewportTop() + const bottom = top + viewport - 1 + + if ( + sel?.anchor && + sel.focus && + sel.anchor.row >= top && + sel.anchor.row <= bottom && + (sel.isDragging || (sel.focus.row >= top && sel.focus.row <= bottom)) + ) { + const shift = sel.isDragging ? selection.shiftAnchor : selection.shiftSelection + + if (actual > 0) { + selection.captureScrolledRows(top, top + actual - 1, 'above') + } else { + selection.captureScrolledRows(bottom + actual + 1, bottom, 'below') + } + + shift(-actual, top, bottom) + } + + s.scrollBy(actual) +} diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 90c4ac12..bc40deba 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -1,4 +1,10 @@ -import { REASONING_PULSE_MS, STREAM_BATCH_MS, STREAM_IDLE_BATCH_MS, STREAM_TYPING_BATCH_MS } from '../config/timing.js' +import { + REASONING_PULSE_MS, + STREAM_BATCH_MS, + STREAM_IDLE_BATCH_MS, + STREAM_SCROLLING_BATCH_MS, + STREAM_TYPING_BATCH_MS +} from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' import { @@ -10,6 +16,7 @@ import { } from '../lib/text.js' import type { ActiveTool, 
ActivityItem, Msg, SubagentProgress } from '../types.js' +import { getInteractionMode } from './interactionMode.js' import { resetFlowOverlays } from './overlayStore.js' import { pushSnapshot } from './spawnHistoryStore.js' import { getTurnState, patchTurnState, resetTurnState } from './turnStore.js' @@ -497,12 +504,15 @@ class TurnController { return } + const interaction = getInteractionMode() + const delay = interaction === 'scrolling' ? STREAM_SCROLLING_BATCH_MS : interaction === 'typing' ? STREAM_TYPING_BATCH_MS : this.streamDelay + this.streamTimer = setTimeout(() => { this.streamTimer = null const raw = this.bufRef.trimStart() const visible = hasReasoningTag(raw) ? splitReasoning(raw).text : raw patchTurnState({ streaming: visible }) - }, this.streamDelay) + }, delay) } startMessage() { diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 7d87be11..4d6dfc19 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -33,6 +33,7 @@ import { useComposerState } from './useComposerState.js' import { useConfigSync } from './useConfigSync.js' import { useInputHandlers } from './useInputHandlers.js' import { useLongRunToolCharms } from './useLongRunToolCharms.js' +import { scrollWithSelectionBy } from './scroll.js' import { useSessionLifecycle } from './useSessionLifecycle.js' import { useSubmission } from './useSubmission.js' @@ -64,12 +65,6 @@ const statusColorOf = (status: string, t: { dim: string; error: string; ok: stri return t.dim } -interface SelectionSnap { - anchor?: { row: number } - focus?: { row: number } - isDragging?: boolean -} - export function useMainApp(gw: GatewayClient) { const { exit } = useApp() const { stdout } = useStdout() @@ -186,46 +181,7 @@ export function useMainApp(gw: GatewayClient) { const virtualHistory = useVirtualHistory(scrollRef, virtualRows, cols) const scrollWithSelection = useCallback( - (delta: number) => { - const s = scrollRef.current - - if (!s) { - return - } - - const sel = 
selection.getState() as null | SelectionSnap - const top = s.getViewportTop() - const bottom = top + s.getViewportHeight() - 1 - - if ( - !sel?.anchor || - !sel.focus || - sel.anchor.row < top || - sel.anchor.row > bottom || - (!sel.isDragging && (sel.focus.row < top || sel.focus.row > bottom)) - ) { - return s.scrollBy(delta) - } - - const max = Math.max(0, s.getScrollHeight() - s.getViewportHeight()) - const cur = s.getScrollTop() + s.getPendingDelta() - const actual = Math.max(0, Math.min(max, cur + delta)) - cur - - if (actual === 0) { - return - } - - const shift = sel!.isDragging ? selection.shiftAnchor : selection.shiftSelection - - if (actual > 0) { - selection.captureScrolledRows(top, top + actual - 1, 'above') - } else { - selection.captureScrolledRows(bottom + actual + 1, bottom, 'below') - } - - shift(-actual, top, bottom) - s.scrollBy(delta) - }, + (delta: number) => scrollWithSelectionBy(delta, { scrollRef, selection }), [selection] ) @@ -700,14 +656,12 @@ export function useMainApp(gw: GatewayClient) { [turn, showProgressArea] ) - const frozenProgressRef = useRef(liveProgress) - - // Freeze the offscreen live tail so scroll doesn't rebuild unseen streaming UI. - if (liveTailVisible || !ui.busy) { - frozenProgressRef.current = liveProgress - } - - const appProgress = liveTailVisible || !ui.busy ? liveProgress : frozenProgressRef.current + // Always pass current progress through. Freezing this while offscreen looked + // like a nice scroll optimization, but it also froze the live tail's + // thinking/tool state at arbitrary intermediate snapshots. Streaming update + // throttling now handles interaction load; progress state should remain + // truthful so panels don't randomly disappear. 
+ const appProgress = liveProgress const cwd = ui.info?.cwd || process.env.HERMES_CWD || process.cwd() const gitBranch = useGitBranch(cwd) diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 9bca6581..8e5f15c1 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -1,6 +1,5 @@ import { type MutableRefObject, useCallback, useEffect, useRef } from 'react' -import { TYPING_IDLE_MS } from '../config/timing.js' import { attachedImageNotice } from '../domain/messages.js' import { looksLikeSlashCommand } from '../domain/slash.js' import type { GatewayClient } from '../gatewayClient.js' @@ -11,6 +10,7 @@ import { PASTE_SNIPPET_RE } from '../protocol/paste.js' import type { Msg } from '../types.js' import type { ComposerActions, ComposerRefs, ComposerState, PasteSnippet } from './interfaces.js' +import { markTyping } from './interactionMode.js' import { turnController } from './turnController.js' import { getUiState, patchUiState } from './uiStore.js' @@ -48,28 +48,13 @@ export function useSubmission(opts: UseSubmissionOptions) { } = opts const lastEmptyAt = useRef(0) - const typingIdleTimer = useRef | null>(null) useEffect(() => { if (composerState.input || composerState.inputBuf.length) { + markTyping() if (getUiState().busy) { turnController.boostStreamingForTyping() } - - if (typingIdleTimer.current) { - clearTimeout(typingIdleTimer.current) - } - - typingIdleTimer.current = setTimeout(() => { - typingIdleTimer.current = null - turnController.relaxStreaming() - }, TYPING_IDLE_MS) - } - - return () => { - if (typingIdleTimer.current) { - clearTimeout(typingIdleTimer.current) - } } }, [composerState.input, composerState.inputBuf]) diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 35f1949b..9b916c46 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -336,7 +336,7 @@ export function TextInput({ active: focus && termFocus && 
!selected }) - const nativeCursor = focus && termFocus && !selected + const nativeCursor = focus && termFocus && !selected && !!stdout?.isTTY const rendered = useMemo(() => { if (!focus) { diff --git a/ui-tui/src/config/limits.ts b/ui-tui/src/config/limits.ts index aa109039..875b6bac 100644 --- a/ui-tui/src/config/limits.ts +++ b/ui-tui/src/config/limits.ts @@ -2,4 +2,4 @@ export const LARGE_PASTE = { chars: 8000, lines: 80 } export const LONG_MSG = 300 export const MAX_HISTORY = 800 export const THINKING_COT_MAX = 160 -export const WHEEL_SCROLL_STEP = 3 +export const WHEEL_SCROLL_STEP = 6 diff --git a/ui-tui/src/config/timing.ts b/ui-tui/src/config/timing.ts index 8fdf6b5f..083fa17f 100644 --- a/ui-tui/src/config/timing.ts +++ b/ui-tui/src/config/timing.ts @@ -1,5 +1,7 @@ export const STREAM_BATCH_MS = 16 export const STREAM_IDLE_BATCH_MS = 16 -export const STREAM_TYPING_BATCH_MS = 80 -export const TYPING_IDLE_MS = 120 +export const STREAM_SCROLLING_BATCH_MS = 250 +export const STREAM_TYPING_BATCH_MS = 120 +export const TYPING_IDLE_MS = 250 +export const SCROLLING_IDLE_MS = 450 export const REASONING_PULSE_MS = 700 diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts index 388b5e5a..e8565e8c 100644 --- a/ui-tui/src/hooks/useVirtualHistory.ts +++ b/ui-tui/src/hooks/useVirtualHistory.ts @@ -17,6 +17,16 @@ const COLD_START = 40 const QUANTUM = OVERSCAN >> 1 const FREEZE_RENDERS = 2 +export const shouldSetVirtualClamp = ({ + itemCount, + sticky, + viewportHeight +}: { + itemCount: number + sticky: boolean + viewportHeight: number +}) => itemCount > 0 && viewportHeight > 0 && !sticky + const upperBound = (arr: number[], target: number) => { let lo = 0 let hi = arr.length @@ -173,11 +183,16 @@ export function useVirtualHistory( // Give the renderer the mounted-row coverage for passive scroll clamping. 
// Without this, burst wheel/page scroll can race past the React commit that // updates the virtual range and paint spacer-only frames. - if (s && n > 0 && vp > 0) { + if (s && shouldSetVirtualClamp({ itemCount: n, sticky, viewportHeight: vp })) { const min = offsets[start] ?? 0 const max = Math.max(min, (offsets[end] ?? total) - vp) s.setClampBounds(min, max) } else { + // Sticky bottom often has live, non-virtualized tail content after the + // virtual transcript (streaming answer / thinking / tools). A clamp based + // only on virtual history would cap rendering before that tail and make + // live thinking appear to vanish. No burst-scroll clamp is needed while + // sticky anyway. s?.setClampBounds(undefined, undefined) } From 381121025edf77886eb89203417a248b9978476a Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:28:55 -0500 Subject: [PATCH 11/87] fix(tui): address review feedback --- tests/test_tui_gateway_server.py | 4 ++++ tui_gateway/server.py | 5 ++++- ui-tui/src/__tests__/textInputWrap.test.ts | 5 +++-- ui-tui/src/lib/inputMetrics.ts | 4 ++-- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/tests/test_tui_gateway_server.py b/tests/test_tui_gateway_server.py index 2639d802..fd9dcc9c 100644 --- a/tests/test_tui_gateway_server.py +++ b/tests/test_tui_gateway_server.py @@ -356,6 +356,9 @@ def test_complete_slash_includes_tui_details_command(): def test_complete_slash_details_args(): + resp_root = server.handle_request( + {"id": "0", "method": "complete.slash", "params": {"text": "/details"}} + ) resp_section = server.handle_request( {"id": "1", "method": "complete.slash", "params": {"text": "/details t"}} ) @@ -367,6 +370,7 @@ def test_complete_slash_details_args(): } ) + assert resp_root["result"]["replace_from"] == len("/details") assert any(item["text"] == "thinking" for item in resp_section["result"]["items"]) assert any(item["text"] == "expanded" for item in resp_mode["result"]["items"]) diff --git 
a/tui_gateway/server.py b/tui_gateway/server.py index b0b379d0..1239ea71 100644 --- a/tui_gateway/server.py +++ b/tui_gateway/server.py @@ -3822,7 +3822,10 @@ def _(rid, params: dict) -> dict: if details_items is not None: return _ok( rid, - {"items": details_items, "replace_from": text.rfind(" ") + 1}, + { + "items": details_items, + "replace_from": text.rfind(" ") + 1 if " " in text else len(text), + }, ) return _ok( diff --git a/ui-tui/src/__tests__/textInputWrap.test.ts b/ui-tui/src/__tests__/textInputWrap.test.ts index 170f6883..e46af487 100644 --- a/ui-tui/src/__tests__/textInputWrap.test.ts +++ b/ui-tui/src/__tests__/textInputWrap.test.ts @@ -42,9 +42,10 @@ describe('input metrics helpers', () => { expect(inputVisualHeight('one\ntwo', 40)).toBe(2) }) - it('reserves a stable transcript scrollbar gutter for composer width', () => { + it('reserves gutters without exceeding the physical terminal width', () => { expect(stableComposerColumns(100, 3)).toBe(93) - expect(stableComposerColumns(10, 3)).toBe(20) + expect(stableComposerColumns(10, 3)).toBe(3) + expect(stableComposerColumns(6, 3)).toBe(1) }) }) diff --git a/ui-tui/src/lib/inputMetrics.ts b/ui-tui/src/lib/inputMetrics.ts index a42dbb2f..9d8ccd1f 100644 --- a/ui-tui/src/lib/inputMetrics.ts +++ b/ui-tui/src/lib/inputMetrics.ts @@ -54,9 +54,9 @@ export function inputVisualHeight(value: string, columns: number) { } export function stableComposerColumns(totalCols: number, promptWidth: number) { - // totalCols is the terminal width. Reserve: + // Physical render/wrap width. 
Reserve: // - outer composer paddingX={1}: 2 columns // - transcript scrollbar gutter + marginLeft: 2 columns // - prompt prefix width - return Math.max(20, totalCols - promptWidth - 4) + return Math.max(1, totalCols - promptWidth - 4) } From bbd950efcf203e53d267d2030d7d853e8fee2b86 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:32:55 -0500 Subject: [PATCH 12/87] fix(tui): keep stream cadence responsive while typing --- ui-tui/src/app/scroll.ts | 3 --- ui-tui/src/app/turnController.ts | 7 +------ ui-tui/src/app/useSubmission.ts | 19 +++++++++++++++++-- ui-tui/src/config/timing.ts | 3 +-- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/ui-tui/src/app/scroll.ts b/ui-tui/src/app/scroll.ts index 2572e280..0d736d2c 100644 --- a/ui-tui/src/app/scroll.ts +++ b/ui-tui/src/app/scroll.ts @@ -1,7 +1,6 @@ import type { ScrollBoxHandle } from '@hermes/ink' import type { SelectionApi } from './interfaces.js' -import { markScrolling } from './interactionMode.js' export interface SelectionSnap { anchor?: { row: number } | null @@ -30,8 +29,6 @@ export function scrollWithSelectionBy(delta: number, { scrollRef, selection }: S return } - markScrolling() - const sel = selection.getState() as null | SelectionSnap const top = s.getViewportTop() const bottom = top + viewport - 1 diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index bc40deba..3240c4e8 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -2,7 +2,6 @@ import { REASONING_PULSE_MS, STREAM_BATCH_MS, STREAM_IDLE_BATCH_MS, - STREAM_SCROLLING_BATCH_MS, STREAM_TYPING_BATCH_MS } from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' @@ -16,7 +15,6 @@ import { } from '../lib/text.js' import type { ActiveTool, ActivityItem, Msg, SubagentProgress } from '../types.js' -import { getInteractionMode } from './interactionMode.js' import { resetFlowOverlays } from 
'./overlayStore.js' import { pushSnapshot } from './spawnHistoryStore.js' import { getTurnState, patchTurnState, resetTurnState } from './turnStore.js' @@ -504,15 +502,12 @@ class TurnController { return } - const interaction = getInteractionMode() - const delay = interaction === 'scrolling' ? STREAM_SCROLLING_BATCH_MS : interaction === 'typing' ? STREAM_TYPING_BATCH_MS : this.streamDelay - this.streamTimer = setTimeout(() => { this.streamTimer = null const raw = this.bufRef.trimStart() const visible = hasReasoningTag(raw) ? splitReasoning(raw).text : raw patchTurnState({ streaming: visible }) - }, delay) + }, this.streamDelay) } startMessage() { diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 8e5f15c1..9bca6581 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -1,5 +1,6 @@ import { type MutableRefObject, useCallback, useEffect, useRef } from 'react' +import { TYPING_IDLE_MS } from '../config/timing.js' import { attachedImageNotice } from '../domain/messages.js' import { looksLikeSlashCommand } from '../domain/slash.js' import type { GatewayClient } from '../gatewayClient.js' @@ -10,7 +11,6 @@ import { PASTE_SNIPPET_RE } from '../protocol/paste.js' import type { Msg } from '../types.js' import type { ComposerActions, ComposerRefs, ComposerState, PasteSnippet } from './interfaces.js' -import { markTyping } from './interactionMode.js' import { turnController } from './turnController.js' import { getUiState, patchUiState } from './uiStore.js' @@ -48,13 +48,28 @@ export function useSubmission(opts: UseSubmissionOptions) { } = opts const lastEmptyAt = useRef(0) + const typingIdleTimer = useRef | null>(null) useEffect(() => { if (composerState.input || composerState.inputBuf.length) { - markTyping() if (getUiState().busy) { turnController.boostStreamingForTyping() } + + if (typingIdleTimer.current) { + clearTimeout(typingIdleTimer.current) + } + + typingIdleTimer.current = setTimeout(() => { + 
typingIdleTimer.current = null + turnController.relaxStreaming() + }, TYPING_IDLE_MS) + } + + return () => { + if (typingIdleTimer.current) { + clearTimeout(typingIdleTimer.current) + } } }, [composerState.input, composerState.inputBuf]) diff --git a/ui-tui/src/config/timing.ts b/ui-tui/src/config/timing.ts index 083fa17f..d428bacf 100644 --- a/ui-tui/src/config/timing.ts +++ b/ui-tui/src/config/timing.ts @@ -1,7 +1,6 @@ export const STREAM_BATCH_MS = 16 export const STREAM_IDLE_BATCH_MS = 16 -export const STREAM_SCROLLING_BATCH_MS = 250 -export const STREAM_TYPING_BATCH_MS = 120 +export const STREAM_TYPING_BATCH_MS = 80 export const TYPING_IDLE_MS = 250 export const SCROLLING_IDLE_MS = 450 export const REASONING_PULSE_MS = 700 From 8f0fa0836f3f6ceadd2d756ad31254336da75b19 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:35:54 -0500 Subject: [PATCH 13/87] fix(tui): preserve composer width on narrow panes --- ui-tui/src/__tests__/textInputWrap.test.ts | 4 ++-- ui-tui/src/lib/inputMetrics.ts | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/ui-tui/src/__tests__/textInputWrap.test.ts b/ui-tui/src/__tests__/textInputWrap.test.ts index e46af487..a05ed42f 100644 --- a/ui-tui/src/__tests__/textInputWrap.test.ts +++ b/ui-tui/src/__tests__/textInputWrap.test.ts @@ -42,9 +42,9 @@ describe('input metrics helpers', () => { expect(inputVisualHeight('one\ntwo', 40)).toBe(2) }) - it('reserves gutters without exceeding the physical terminal width', () => { + it('reserves gutters on wide panes without starving narrow composer width', () => { expect(stableComposerColumns(100, 3)).toBe(93) - expect(stableComposerColumns(10, 3)).toBe(3) + expect(stableComposerColumns(10, 3)).toBe(5) expect(stableComposerColumns(6, 3)).toBe(1) }) }) diff --git a/ui-tui/src/lib/inputMetrics.ts b/ui-tui/src/lib/inputMetrics.ts index 9d8ccd1f..d54f9637 100644 --- a/ui-tui/src/lib/inputMetrics.ts +++ b/ui-tui/src/lib/inputMetrics.ts @@ -54,9 +54,9 @@ 
export function inputVisualHeight(value: string, columns: number) { } export function stableComposerColumns(totalCols: number, promptWidth: number) { - // Physical render/wrap width. Reserve: - // - outer composer paddingX={1}: 2 columns - // - transcript scrollbar gutter + marginLeft: 2 columns - // - prompt prefix width - return Math.max(1, totalCols - promptWidth - 4) + // Physical render/wrap width. Always reserve outer composer padding and + // prompt prefix. Only reserve the transcript scrollbar gutter when the + // terminal is wide enough; on narrow panes, preserving input columns beats + // keeping gutters visually aligned. + return Math.max(1, totalCols - promptWidth - 2 - (totalCols - promptWidth >= 24 ? 2 : 0)) } From bc1731044260735c2e10c8f3feb3b33c6c0ec8f0 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:39:25 -0500 Subject: [PATCH 14/87] fix(tui): smooth selection drag behavior --- ui-tui/packages/hermes-ink/src/ink/ink.tsx | 115 ++++++++++++++++++++- ui-tui/src/app/useSubmission.ts | 2 + 2 files changed, 116 insertions(+), 1 deletion(-) diff --git a/ui-tui/packages/hermes-ink/src/ink/ink.tsx b/ui-tui/packages/hermes-ink/src/ink/ink.tsx index 7422cf46..e87f97a4 100644 --- a/ui-tui/packages/hermes-ink/src/ink/ink.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/ink.tsx @@ -19,6 +19,7 @@ import App from './components/App.js' import type { CursorDeclaration, CursorDeclarationSetter } from './components/CursorDeclarationContext.js' import { FRAME_INTERVAL_MS } from './constants.js' import * as dom from './dom.js' +import { markDirty } from './dom.js' import { KeyboardEvent } from './events/keyboard-event.js' import { FocusManager } from './focus.js' import { emptyFrame, type Frame, type FrameEvent } from './frame.js' @@ -251,6 +252,10 @@ export default class Ink { // into one follow-up microtask instead of stacking renders. 
private isRendering = false private immediateRerenderRequested = false + private selectionNotifyQueued = false + private selectionDragCell: { col: number; row: number } | null = null + private selectionAutoScrollTimer: ReturnType | null = null + private selectionAutoScrollDir: -1 | 0 | 1 = 0 constructor(private readonly options: Options) { autoBind(this) @@ -1601,7 +1606,13 @@ export default class Ink { return () => this.selectionListeners.delete(cb) } private notifySelectionChange(): void { - this.scheduleRender() + if (!this.selectionNotifyQueued) { + this.selectionNotifyQueued = true + queueMicrotask(() => { + this.selectionNotifyQueued = false + this.scheduleRender() + }) + } const active = hasSelection(this.selection) @@ -1635,6 +1646,8 @@ export default class Ink { return undefined } + this.stopSelectionAutoScroll() + return dispatchMouse( this.rootNode, col, @@ -1649,6 +1662,7 @@ export default class Ink { return } + this.stopSelectionAutoScroll() dispatchMouse(this.rootNode, col, row, 'onMouseUp', button, isEmptyCellAt(this.frontFrame.screen, col, row), target) } dispatchMouseDrag(target: dom.DOMElement, col: number, row: number, button: number): void { @@ -1774,6 +1788,17 @@ export default class Ink { return } + if (this.selectionDragCell?.col === col && this.selectionDragCell.row === row) { + this.updateSelectionAutoScroll(row) + return + } + + this.selectionDragCell = { col, row } + this.applySelectionDrag(col, row) + this.updateSelectionAutoScroll(row) + } + + private applySelectionDrag(col: number, row: number): void { const sel = this.selection if (sel.anchorSpan) { @@ -1785,6 +1810,94 @@ export default class Ink { this.notifySelectionChange() } + private updateSelectionAutoScroll(row: number): void { + if (!this.selection.isDragging || !this.altScreenActive) { + this.stopSelectionAutoScroll() + return + } + + const dir: -1 | 0 | 1 = row <= 0 ? -1 : row >= this.terminalRows - 1 ? 
1 : 0 + + if (dir === 0) { + this.stopSelectionAutoScroll() + return + } + + if (this.selectionAutoScrollDir === dir && this.selectionAutoScrollTimer) { + return + } + + this.stopSelectionAutoScroll() + this.selectionAutoScrollDir = dir + this.selectionAutoScrollTimer = setInterval(() => this.stepSelectionAutoScroll(), 50) + } + + private stepSelectionAutoScroll(): void { + if (!this.selection.isDragging || !this.altScreenActive || this.selectionAutoScrollDir === 0) { + this.stopSelectionAutoScroll() + return + } + + const box = this.findPrimaryScrollBox() + + if (!box) { + this.stopSelectionAutoScroll() + return + } + + const viewport = Math.max(0, box.scrollViewportHeight ?? 0) + const max = Math.max(0, (box.scrollHeight ?? 0) - viewport) + const current = box.scrollTop ?? 0 + const next = Math.max(0, Math.min(max, current + this.selectionAutoScrollDir)) + + if (next === current) { + return + } + + if (this.selectionAutoScrollDir > 0) { + captureScrolledRows(this.selection, this.frontFrame.screen, box.scrollViewportTop ?? 0, box.scrollViewportTop ?? 0, 'above') + } else { + const bottom = (box.scrollViewportTop ?? 0) + viewport - 1 + captureScrolledRows(this.selection, this.frontFrame.screen, bottom, bottom, 'below') + } + + box.stickyScroll = false + box.pendingScrollDelta = undefined + box.scrollAnchor = undefined + box.scrollTop = next + markDirty(box) + shiftAnchor(this.selection, -this.selectionAutoScrollDir, box.scrollViewportTop ?? 0, (box.scrollViewportTop ?? 0) + viewport - 1) + this.applySelectionDrag(this.selectionDragCell?.col ?? 0, this.selectionAutoScrollDir > 0 ? 
this.terminalRows - 1 : 0) + } + + private stopSelectionAutoScroll(): void { + if (this.selectionAutoScrollTimer) { + clearInterval(this.selectionAutoScrollTimer) + this.selectionAutoScrollTimer = null + } + + this.selectionAutoScrollDir = 0 + this.selectionDragCell = null + } + + private findPrimaryScrollBox(): dom.DOMElement | undefined { + const stack = [this.rootNode] + + while (stack.length) { + const node = stack.shift()! + + if (node.style.overflowY === 'scroll' && node.scrollHeight !== undefined && node.scrollViewportHeight !== undefined) { + return node + } + + for (const child of node.childNodes) { + if (child.nodeName !== '#text') { + stack.push(child) + } + } + } + } + // Methods to properly suspend stdin for external editor usage // This is needed to prevent Ink from swallowing keystrokes when an external editor is active private stdinListeners: Array<{ diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 9bca6581..42129cb7 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -295,6 +295,8 @@ export function useSubmission(opts: UseSubmissionOptions) { if (doubleTap && live.sid && composerRefs.queueRef.current.length) { const next = composerActions.dequeue() + composerActions.syncQueue() + if (next) { composerActions.setQueueEdit(null) dispatchSubmission(next) From 7d68ea9501c5c3a7c98c795bf90d2076c9d0e90b Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:42:04 -0500 Subject: [PATCH 15/87] fix(tui): stream legacy thinking deltas visibly --- .../__tests__/createGatewayEventHandler.test.ts | 14 ++++++++++++++ ui-tui/src/app/createGatewayEventHandler.ts | 7 ++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 991c87a1..658ca571 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ 
b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -123,6 +123,20 @@ describe('createGatewayEventHandler', () => { expect(appended[0]?.toolTokens).toBeGreaterThan(0) }) + it('streams legacy thinking.delta into visible reasoning state', () => { + vi.useFakeTimers() + const appended: Msg[] = [] + const streamed = 'short streamed reasoning' + + createGatewayEventHandler(buildCtx(appended))({ payload: { text: streamed }, type: 'thinking.delta' } as any) + vi.runOnlyPendingTimers() + + expect(getTurnState().reasoning).toBe(streamed) + expect(getTurnState().reasoningActive).toBe(true) + expect(getTurnState().reasoningTokens).toBe(estimateTokensRough(streamed)) + vi.useRealTimers() + }) + it('ignores fallback reasoning.available when streamed reasoning already exists', () => { const appended: Msg[] = [] const streamed = 'short streamed reasoning' diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index 15cf00a5..94e82c56 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -220,7 +220,12 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: const text = ev.payload?.text if (text !== undefined) { - scheduleThinkingStatus(text ? 
String(text) : statusFromBusy()) + const value = String(text) + scheduleThinkingStatus(value || statusFromBusy()) + + if (value) { + turnController.recordReasoningDelta(value) + } } return From e16e196c7e0530186f3883104681f36ed3dabc3b Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:44:19 -0500 Subject: [PATCH 16/87] fix(tui): keep selection drag responsive --- ui-tui/packages/hermes-ink/src/ink/ink.tsx | 37 +++++++++++++--------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/ui-tui/packages/hermes-ink/src/ink/ink.tsx b/ui-tui/packages/hermes-ink/src/ink/ink.tsx index e87f97a4..d05b743a 100644 --- a/ui-tui/packages/hermes-ink/src/ink/ink.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/ink.tsx @@ -65,6 +65,7 @@ import { type SelectionState, selectLineAt, selectWordAt, + selectionBounds, shiftAnchor, shiftSelection, shiftSelectionForFollow, @@ -252,7 +253,6 @@ export default class Ink { // into one follow-up microtask instead of stacking renders. private isRendering = false private immediateRerenderRequested = false - private selectionNotifyQueued = false private selectionDragCell: { col: number; row: number } | null = null private selectionAutoScrollTimer: ReturnType | null = null private selectionAutoScrollDir: -1 | 0 | 1 = 0 @@ -1606,13 +1606,7 @@ export default class Ink { return () => this.selectionListeners.delete(cb) } private notifySelectionChange(): void { - if (!this.selectionNotifyQueued) { - this.selectionNotifyQueued = true - queueMicrotask(() => { - this.selectionNotifyQueued = false - this.scheduleRender() - }) - } + this.scheduleRender() const active = hasSelection(this.selection) @@ -1854,11 +1848,16 @@ export default class Ink { return } - if (this.selectionAutoScrollDir > 0) { - captureScrolledRows(this.selection, this.frontFrame.screen, box.scrollViewportTop ?? 0, box.scrollViewportTop ?? 0, 'above') - } else { - const bottom = (box.scrollViewportTop ?? 
0) + viewport - 1 - captureScrolledRows(this.selection, this.frontFrame.screen, bottom, bottom, 'below') + const top = box.scrollViewportTop ?? 0 + const bottom = top + viewport - 1 + const before = selectionBounds(this.selection) + + if (before) { + if (this.selectionAutoScrollDir > 0) { + captureScrolledRows(this.selection, this.frontFrame.screen, top, top, 'above') + } else { + captureScrolledRows(this.selection, this.frontFrame.screen, bottom, bottom, 'below') + } } box.stickyScroll = false @@ -1866,8 +1865,16 @@ export default class Ink { box.scrollAnchor = undefined box.scrollTop = next markDirty(box) - shiftAnchor(this.selection, -this.selectionAutoScrollDir, box.scrollViewportTop ?? 0, (box.scrollViewportTop ?? 0) + viewport - 1) - this.applySelectionDrag(this.selectionDragCell?.col ?? 0, this.selectionAutoScrollDir > 0 ? this.terminalRows - 1 : 0) + shiftAnchor(this.selection, -this.selectionAutoScrollDir, top, bottom) + + if (this.selectionDragCell) { + this.selectionDragCell = { + col: this.selectionDragCell.col, + row: this.selectionAutoScrollDir > 0 ? bottom : top + } + } + + this.applySelectionDrag(this.selectionDragCell?.col ?? 0, this.selectionDragCell?.row ?? (this.selectionAutoScrollDir > 0 ? bottom : top)) } private stopSelectionAutoScroll(): void { From 5ac4088856f18d19e9d3d658b6774b5027bc2ea9 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:46:44 -0500 Subject: [PATCH 17/87] fix(tui): keep live progress visible while scrolling --- ui-tui/src/components/appLayout.tsx | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 170d0649..b9e9fece 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -110,6 +110,16 @@ const TranscriptPane = memo(function TranscriptPane({ <> + + {transcript.virtualHistory.topSpacer > 0 ? 
: null} {transcript.virtualRows.slice(transcript.virtualHistory.start, transcript.virtualHistory.end).map(row => ( @@ -137,15 +147,6 @@ const TranscriptPane = memo(function TranscriptPane({ {transcript.virtualHistory.bottomSpacer > 0 ? : null} - From 7143d22a83a92c3dc3bc32fa49ba3af4af3ecb76 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:49:56 -0500 Subject: [PATCH 18/87] fix(tui): keep queued sends in queue UI --- ui-tui/src/app/useMainApp.ts | 1 + ui-tui/src/app/useSubmission.ts | 18 ++++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 4d6dfc19..d46744a0 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -395,6 +395,7 @@ export function useMainApp(gw: GatewayClient) { const next = composerActions.dequeue() if (next) { + patchUiState({ busy: true, status: 'running…' }) sendQueued(next) } }, [ui.sid, ui.busy, composerActions, composerRefs, sendQueued]) diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 42129cb7..b499bfd8 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -74,10 +74,10 @@ export function useSubmission(opts: UseSubmissionOptions) { }, [composerState.input, composerState.inputBuf]) const send = useCallback( - (text: string) => { + (text: string, showUserMessage = true) => { const expand = expandSnips(composerState.pasteSnips) - const startSubmit = (displayText: string, submitText: string) => { + const startSubmit = (displayText: string, submitText: string, showUserMessage = true) => { const sid = getUiState().sid if (!sid) { @@ -87,7 +87,9 @@ export function useSubmission(opts: UseSubmissionOptions) { turnController.clearStatusTimer() maybeGoodVibes(submitText) setLastUserMsg(text) - appendMessage({ role: 'user', text: displayText }) + if (showUserMessage) { + appendMessage({ role: 'user', text: displayText }) + } patchUiState({ busy: 
true, status: 'running…' }) turnController.bufRef = '' turnController.interrupted = false @@ -114,7 +116,7 @@ export function useSubmission(opts: UseSubmissionOptions) { gw.request('input.detect_drop', { session_id: sid, text }) .then(r => { if (!r?.matched) { - return startSubmit(text, expand(text)) + return startSubmit(text, expand(text), showUserMessage) } if (r.is_image) { @@ -123,9 +125,9 @@ export function useSubmission(opts: UseSubmissionOptions) { turnController.pushActivity(`detected file: ${r.name}`) } - startSubmit(r.text || text, expand(r.text || text)) + startSubmit(r.text || text, expand(r.text || text), showUserMessage) }) - .catch(() => startSubmit(text, expand(text))) + .catch(() => startSubmit(text, expand(text), showUserMessage)) }, [appendMessage, composerActions, composerState.pasteSnips, gw, maybeGoodVibes, setLastUserMsg, sys] ) @@ -192,9 +194,9 @@ export function useSubmission(opts: UseSubmissionOptions) { return interpolate(text, send) } - send(text) + send(text, composerRefs.queueRef.current.length === 0) }, - [interpolate, send, shellExec] + [composerRefs, interpolate, send, shellExec] ) const dispatchSubmission = useCallback( From a0aebad673ff016e7c8e173e60df88b63b12ccc7 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 04:59:44 -0500 Subject: [PATCH 19/87] fix(tui): anchor details to stream timeline --- .../createGatewayEventHandler.test.ts | 53 +++++++------- ui-tui/src/app/createGatewayEventHandler.ts | 15 ++-- ui-tui/src/app/turnController.ts | 73 ++++++++----------- 3 files changed, 64 insertions(+), 77 deletions(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 658ca571..c3cb5095 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -82,15 +82,13 @@ describe('createGatewayEventHandler', () => { type: 'message.complete' } as any) - 
expect(appended).toHaveLength(1) - expect(appended[0]).toMatchObject({ - role: 'assistant', - text: 'final answer', - thinking: 'mapped the page' - }) - expect(appended[0]?.tools).toHaveLength(1) - expect(appended[0]?.tools?.[0]).toContain('hero cards') - expect(appended[0]?.toolTokens).toBeGreaterThan(0) + expect(appended).toHaveLength(3) + expect(appended[0]).toMatchObject({ kind: 'trail', role: 'system', text: '', thinking: 'mapped the page' }) + expect(appended[1]).toMatchObject({ kind: 'trail', role: 'system', text: '' }) + expect(appended[1]?.tools).toHaveLength(1) + expect(appended[1]?.tools?.[0]).toContain('hero cards') + expect(appended[1]?.toolTokens).toBeGreaterThan(0) + expect(appended[2]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) it('keeps tool tokens across handler recreation mid-turn', () => { @@ -118,9 +116,10 @@ describe('createGatewayEventHandler', () => { type: 'message.complete' } as any) - expect(appended).toHaveLength(1) - expect(appended[0]?.tools).toHaveLength(1) - expect(appended[0]?.toolTokens).toBeGreaterThan(0) + expect(appended).toHaveLength(3) + expect(appended[1]?.tools).toHaveLength(1) + expect(appended[1]?.toolTokens).toBeGreaterThan(0) + expect(appended[2]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) it('streams legacy thinking.delta into visible reasoning state', () => { @@ -148,9 +147,10 @@ describe('createGatewayEventHandler', () => { onEvent({ payload: { text: fallback }, type: 'reasoning.available' } as any) onEvent({ payload: { text: 'final answer' }, type: 'message.complete' } as any) - expect(appended).toHaveLength(1) + expect(appended).toHaveLength(2) expect(appended[0]?.thinking).toBe(streamed) expect(appended[0]?.thinkingTokens).toBe(estimateTokensRough(streamed)) + expect(appended[1]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) it('uses message.complete reasoning when no streamed reasoning ref', () => { @@ -161,9 +161,10 @@ describe('createGatewayEventHandler', () 
=> { onEvent({ payload: { reasoning: fromServer, text: 'final answer' }, type: 'message.complete' } as any) - expect(appended).toHaveLength(1) + expect(appended).toHaveLength(2) expect(appended[0]?.thinking).toBe(fromServer) expect(appended[0]?.thinkingTokens).toBe(estimateTokensRough(fromServer)) + expect(appended[1]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) it('anchors inline_diff as its own segment where the edit happened', () => { @@ -184,21 +185,19 @@ describe('createGatewayEventHandler', () => { expect(appended).toHaveLength(0) expect(turnController.segmentMessages).toEqual([ { role: 'assistant', text: 'Editing the file' }, + { kind: 'trail', role: 'system', text: '', tools: ['Patch("foo.ts") ✓'] }, { kind: 'diff', role: 'assistant', text: block } ]) onEvent({ payload: { text: 'patch applied' }, type: 'message.complete' } as any) - // Four transcript messages: pre-tool narration → tool trail → diff - // (kind='diff', so MessageLine gives it blank-line breathing room) → - // post-tool narration. The final message does NOT contain a diff. - expect(appended).toHaveLength(4) + expect(appended).toHaveLength(5) expect(appended[0]?.text).toBe('Editing the file') expect(appended[1]).toMatchObject({ kind: 'trail' }) expect(appended[1]?.tools?.[0]).toContain('Patch') expect(appended[2]).toMatchObject({ kind: 'diff', text: block }) - expect(appended[3]?.text).toBe('patch applied') - expect(appended[3]?.text).not.toContain('```diff') + expect(appended[4]?.text).toBe('patch applied') + expect(appended[4]?.text).not.toContain('```diff') }) it('drops the diff segment when the final assistant text narrates the same diff', () => { @@ -212,9 +211,10 @@ describe('createGatewayEventHandler', () => { // Only the final message — diff-only segment dropped so we don't // render two stacked copies of the same patch. - expect(appended).toHaveLength(1) - expect(appended[0]?.text).toBe(assistantText) - expect((appended[0]?.text.match(/```diff/g) ?? 
[]).length).toBe(1) + expect(appended).toHaveLength(2) + expect(appended[0]).toMatchObject({ kind: 'trail' }) + expect(appended[1]?.text).toBe(assistantText) + expect((appended[1]?.text.match(/```diff/g) ?? []).length).toBe(1) }) it('strips the CLI "┊ review diff" header from inline diff segments', () => { @@ -246,9 +246,10 @@ describe('createGatewayEventHandler', () => { } as any) onEvent({ payload: { text: assistantText }, type: 'message.complete' } as any) - expect(appended).toHaveLength(1) - expect(appended[0]?.text).toBe(assistantText) - expect((appended[0]?.text.match(/```diff/g) ?? []).length).toBe(1) + expect(appended).toHaveLength(2) + expect(appended[0]).toMatchObject({ kind: 'trail' }) + expect(appended[1]?.text).toBe(assistantText) + expect((appended[1]?.text.match(/```diff/g) ?? []).length).toBe(1) }) it('keeps tool trail terse when inline_diff is present', () => { diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index 94e82c56..502f5387 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -379,6 +379,10 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: const inlineDiffText = ev.payload.inline_diff && getUiState().inlineDiffs ? stripAnsi(String(ev.payload.inline_diff)).trim() : '' + if (inlineDiffText) { + turnController.flushStreamingSegment() + } + turnController.recordToolComplete( ev.payload.tool_id, ev.payload.name, @@ -386,17 +390,10 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: inlineDiffText ? '' : ev.payload.summary ) - if (!inlineDiffText) { - return + if (inlineDiffText) { + turnController.pushInlineDiffSegment(inlineDiffText) } - // Anchor the diff to where the edit happened in the turn — between - // the narration that preceded the tool call and whatever the agent - // streams afterwards. 
The previous end-merge put the diff at the - // bottom of the final message even when the edit fired mid-turn, - // which read as "the agent wrote this after saying that". - turnController.pushInlineDiffSegment(inlineDiffText) - return } diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 3240c4e8..1269409d 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -38,11 +38,7 @@ const diffSegmentBody = (msg: Msg): null | string => { return m ? m[1]! : null } -const insertBeforeFirstDiff = (segments: Msg[], msg: Msg): Msg[] => { - const index = segments.findIndex(segment => segment.kind === 'diff') - - return index < 0 ? [...segments, msg] : [...segments.slice(0, index), msg, ...segments.slice(index)] -} +const hasDetails = (msg: Msg): boolean => Boolean(msg.thinking || msg.tools?.length || msg.toolTokens) export interface InterruptDeps { appendMessage: (msg: Msg) => void @@ -69,6 +65,7 @@ class TurnController { persistSpawnTree?: (subagents: SubagentProgress[], sessionId: null | string) => Promise protocolWarned = false reasoningText = '' + reasoningSegmentOffset = 0 segmentMessages: Msg[] = [] pendingSegmentTools: string[] = [] statusTimer: Timer = null @@ -94,6 +91,7 @@ class TurnController { clearReasoning() { this.reasoningTimer = clear(this.reasoningTimer) this.reasoningText = '' + this.reasoningSegmentOffset = 0 this.toolTokenAcc = 0 patchTurnState({ reasoning: '', reasoningTokens: 0, toolTokens: 0 }) } @@ -181,29 +179,33 @@ class TurnController { flushStreamingSegment() { const raw = this.bufRef.trimStart() - - if (!raw) { - return - } - - const split = hasReasoningTag(raw) ? splitReasoning(raw) : { reasoning: '', text: raw } + const split = raw ? (hasReasoningTag(raw) ? 
splitReasoning(raw) : { reasoning: '', text: raw }) : { reasoning: '', text: '' } if (split.reasoning && !this.reasoningText.trim()) { this.reasoningText = split.reasoning patchTurnState({ reasoning: this.reasoningText, reasoningTokens: estimateTokensRough(this.reasoningText) }) } - const text = split.text + const thinking = this.reasoningText.slice(this.reasoningSegmentOffset).trim() + const msg: Msg = { + role: split.text ? 'assistant' : 'system', + text: split.text, + ...(!split.text && { kind: 'trail' as const }), + ...(thinking && { + thinking, + thinkingTokens: estimateTokensRough(thinking) + }), + ...(this.pendingSegmentTools.length && { tools: this.pendingSegmentTools }) + } this.streamTimer = clear(this.streamTimer) - if (text) { - const tools = this.pendingSegmentTools - - this.segmentMessages = [...this.segmentMessages, { role: 'assistant', text, ...(tools.length && { tools }) }] - this.pendingSegmentTools = [] + if (split.text || hasDetails(msg)) { + this.segmentMessages = [...this.segmentMessages, msg] } + this.reasoningSegmentOffset = this.reasoningText.length + this.pendingSegmentTools = [] this.bufRef = '' patchTurnState({ streamPendingTools: [], streamSegments: this.segmentMessages, streaming: '' }) } @@ -295,7 +297,6 @@ class TurnController { const finalText = split.text const existingReasoning = this.reasoningText.trim() || String(payload.reasoning ?? '').trim() const savedReasoning = [existingReasoning, existingReasoning ? '' : split.reasoning].filter(Boolean).join('\n\n') - const savedReasoningTokens = savedReasoning ? 
estimateTokensRough(savedReasoning) : 0 const savedToolTokens = this.toolTokenAcc const tools = this.pendingSegmentTools @@ -312,32 +313,20 @@ class TurnController { return body === null || (!finalHasOwnDiffFence && !finalText.includes(body)) }) - const hasDiffSegment = segments.some(msg => msg.kind === 'diff') - const detailsBelongBeforeDiff = hasDiffSegment && (tools.length > 0 || Boolean(savedReasoning)) - - const finalMessages = detailsBelongBeforeDiff - ? insertBeforeFirstDiff(segments, { - kind: 'trail', - role: 'system', - text: '', - thinking: savedReasoning || undefined, - thinkingTokens: savedReasoning ? savedReasoningTokens : undefined, - toolTokens: savedToolTokens || undefined, - ...(tools.length && { tools }) - }) - : [...segments] + const finalThinking = savedReasoning.slice(this.reasoningSegmentOffset).trim() + const finalDetails: Msg = { + kind: 'trail', + role: 'system', + text: '', + thinking: finalThinking || undefined, + thinkingTokens: finalThinking ? estimateTokensRough(finalThinking) : undefined, + toolTokens: savedToolTokens || undefined, + ...(tools.length && { tools }) + } + const finalMessages = hasDetails(finalDetails) ? [...segments, finalDetails] : [...segments] if (finalText) { - finalMessages.push({ - role: 'assistant', - text: finalText, - ...(!detailsBelongBeforeDiff && { - thinking: savedReasoning || undefined, - thinkingTokens: savedReasoning ? 
savedReasoningTokens : undefined, - toolTokens: savedToolTokens || undefined, - ...(tools.length && { tools }) - }) - }) + finalMessages.push({ role: 'assistant', text: finalText }) } const wasInterrupted = this.interrupted From 2e6c3c7d23711e8f0bfc0002ae793d0e2b6d65e1 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 05:06:57 -0500 Subject: [PATCH 20/87] fix(tui): address follow-up review nits --- tui_gateway/server.py | 8 +++++- ui-tui/src/app/slash/commands/session.ts | 4 +-- ui-tui/src/app/useSubmission.ts | 31 ++++++++++++++---------- ui-tui/src/components/appLayout.tsx | 19 +++++++-------- ui-tui/src/components/textInput.tsx | 15 ++++++------ ui-tui/src/hooks/useVirtualHistory.ts | 2 +- 6 files changed, 44 insertions(+), 35 deletions(-) diff --git a/tui_gateway/server.py b/tui_gateway/server.py index 1239ea71..39def65e 100644 --- a/tui_gateway/server.py +++ b/tui_gateway/server.py @@ -3743,7 +3743,13 @@ def _details_completions(text: str) -> list[dict] | None: return [ _details_completion_item( candidate, - "section override" if candidate in sections else "global mode", + ( + "section override" + if candidate in sections + else "cycle global mode" + if candidate == "cycle" + else "global mode" + ), ) for candidate in candidates if candidate.startswith(prefix) and candidate != prefix diff --git a/ui-tui/src/app/slash/commands/session.ts b/ui-tui/src/app/slash/commands/session.ts index 7cb7fcf8..e91dd421 100644 --- a/ui-tui/src/app/slash/commands/session.ts +++ b/ui-tui/src/app/slash/commands/session.ts @@ -21,7 +21,7 @@ const GLOBAL_MODEL_FLAG_RE = /(?:^|\s)--global(?:\s|$)/ const persistedModelArg = (arg: string) => { const trimmed = arg.trim() - return GLOBAL_MODEL_FLAG_RE.test(trimmed) ? trimmed : `${trimmed} --global` + return !trimmed || GLOBAL_MODEL_FLAG_RE.test(trimmed) ? 
trimmed : `${trimmed} --global` } export const sessionCommands: SlashCommand[] = [ @@ -73,7 +73,7 @@ export const sessionCommands: SlashCommand[] = [ return } - if (!arg) { + if (!arg.trim()) { return patchOverlayState({ modelPicker: true }) } diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index b499bfd8..70a3faf3 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -51,24 +51,29 @@ export function useSubmission(opts: UseSubmissionOptions) { const typingIdleTimer = useRef | null>(null) useEffect(() => { - if (composerState.input || composerState.inputBuf.length) { - if (getUiState().busy) { - turnController.boostStreamingForTyping() - } - - if (typingIdleTimer.current) { - clearTimeout(typingIdleTimer.current) - } - - typingIdleTimer.current = setTimeout(() => { - typingIdleTimer.current = null - turnController.relaxStreaming() - }, TYPING_IDLE_MS) + if (typingIdleTimer.current) { + clearTimeout(typingIdleTimer.current) + typingIdleTimer.current = null } + if (!composerState.input && !composerState.inputBuf.length) { + turnController.relaxStreaming() + return + } + + if (getUiState().busy) { + turnController.boostStreamingForTyping() + } + + typingIdleTimer.current = setTimeout(() => { + typingIdleTimer.current = null + turnController.relaxStreaming() + }, TYPING_IDLE_MS) + return () => { if (typingIdleTimer.current) { clearTimeout(typingIdleTimer.current) + typingIdleTimer.current = null } } }, [composerState.input, composerState.inputBuf]) diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index b9e9fece..170d0649 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -110,16 +110,6 @@ const TranscriptPane = memo(function TranscriptPane({ <> - - {transcript.virtualHistory.topSpacer > 0 ? 
: null} {transcript.virtualRows.slice(transcript.virtualHistory.start, transcript.virtualHistory.end).map(row => ( @@ -147,6 +137,15 @@ const TranscriptPane = memo(function TranscriptPane({ {transcript.virtualHistory.bottomSpacer > 0 ? : null} + diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 9b916c46..9f8b2994 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -431,13 +431,11 @@ export function TextInput({ parentChangeTimer.current = setTimeout(flushParentChange, 16) } - const flushLocalRender = () => { + const cancelLocalRender = () => { if (localRenderTimer.current) { clearTimeout(localRenderTimer.current) localRenderTimer.current = null } - - setCur(curRef.current) } const scheduleLocalRender = () => { @@ -445,7 +443,10 @@ export function TextInput({ return } - localRenderTimer.current = setTimeout(flushLocalRender, 16) + localRenderTimer.current = setTimeout(() => { + localRenderTimer.current = null + setCur(curRef.current) + }, 16) } const canFastEchoBase = () => focus && termFocus && !selected && !mask && !!stdout?.isTTY @@ -468,9 +469,7 @@ export function TextInput({ return false } - const prev = current[cursor - 1] - - return !!prev && stringWidth(prev) === 1 + return stringWidth(current.slice(prevPos(current, cursor), cursor)) === 1 } const commit = (next: string, nextCur: number, track = true, syncParent = true, syncLocal = true) => { @@ -494,7 +493,7 @@ export function TextInput({ } if (syncLocal) { - flushLocalRender() + cancelLocalRender() setCur(c) } else { scheduleLocalRender() diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts index e8565e8c..17c93a75 100644 --- a/ui-tui/src/hooks/useVirtualHistory.ts +++ b/ui-tui/src/hooks/useVirtualHistory.ts @@ -235,7 +235,7 @@ export function useVirtualHistory( if (dirty) { setVer(v => v + 1) } - }, [end, hasScrollRef, items, n, offsets, scrollRef, start, total, vp]) + }, [end, 
hasScrollRef, items, n, offsets, scrollRef, start, sticky, total, vp]) return { bottomSpacer: Math.max(0, total - (offsets[end] ?? total)), From 05dc2eec364529469efec137157ce3f9312b8634 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 05:13:21 -0500 Subject: [PATCH 21/87] fix(tui): tighten timeline detail spacing --- ui-tui/src/components/messageLine.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index fc6f78e9..9807b1bb 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -35,7 +35,7 @@ export const MessageLine = memo(function MessageLine({ if (msg.kind === 'trail' && (msg.tools?.length || thinking)) { return thinkingMode !== 'hidden' || toolsMode !== 'hidden' || activityMode !== 'hidden' ? ( - + Date: Sun, 26 Apr 2026 05:17:26 -0500 Subject: [PATCH 22/87] fix(tui): attach inline diffs to tool timeline --- .../createGatewayEventHandler.test.ts | 48 ++++++++----------- ui-tui/src/app/createGatewayEventHandler.ts | 20 ++++---- ui-tui/src/app/turnController.ts | 23 +++++++-- 3 files changed, 49 insertions(+), 42 deletions(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index c3cb5095..7e0cddfe 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -185,19 +185,17 @@ describe('createGatewayEventHandler', () => { expect(appended).toHaveLength(0) expect(turnController.segmentMessages).toEqual([ { role: 'assistant', text: 'Editing the file' }, - { kind: 'trail', role: 'system', text: '', tools: ['Patch("foo.ts") ✓'] }, - { kind: 'diff', role: 'assistant', text: block } + { kind: 'diff', role: 'assistant', text: block, tools: ['Patch("foo.ts") ✓'] } ]) onEvent({ payload: { text: 'patch applied' }, type: 'message.complete' } as any) - 
expect(appended).toHaveLength(5) + expect(appended).toHaveLength(4) expect(appended[0]?.text).toBe('Editing the file') - expect(appended[1]).toMatchObject({ kind: 'trail' }) + expect(appended[1]).toMatchObject({ kind: 'diff', text: block }) expect(appended[1]?.tools?.[0]).toContain('Patch') - expect(appended[2]).toMatchObject({ kind: 'diff', text: block }) - expect(appended[4]?.text).toBe('patch applied') - expect(appended[4]?.text).not.toContain('```diff') + expect(appended[3]?.text).toBe('patch applied') + expect(appended[3]?.text).not.toContain('```diff') }) it('drops the diff segment when the final assistant text narrates the same diff', () => { @@ -211,10 +209,9 @@ describe('createGatewayEventHandler', () => { // Only the final message — diff-only segment dropped so we don't // render two stacked copies of the same patch. - expect(appended).toHaveLength(2) - expect(appended[0]).toMatchObject({ kind: 'trail' }) - expect(appended[1]?.text).toBe(assistantText) - expect((appended[1]?.text.match(/```diff/g) ?? []).length).toBe(1) + expect(appended).toHaveLength(1) + expect(appended[0]?.text).toBe(assistantText) + expect((appended[0]?.text.match(/```diff/g) ?? []).length).toBe(1) }) it('strips the CLI "┊ review diff" header from inline diff segments', () => { @@ -226,12 +223,12 @@ describe('createGatewayEventHandler', () => { onEvent({ payload: { text: 'done' }, type: 'message.complete' } as any) // Tool trail first, then diff segment (kind='diff'), then final narration. 
- expect(appended).toHaveLength(3) - expect(appended[0]?.kind).toBe('trail') - expect(appended[1]?.kind).toBe('diff') - expect(appended[1]?.text).not.toContain('┊ review diff') - expect(appended[1]?.text).toContain('--- a/foo.ts') - expect(appended[2]?.text).toBe('done') + expect(appended).toHaveLength(2) + expect(appended[0]?.kind).toBe('diff') + expect(appended[0]?.text).not.toContain('┊ review diff') + expect(appended[0]?.text).toContain('--- a/foo.ts') + expect(appended[0]?.tools?.[0]).toContain('Tool') + expect(appended[1]?.text).toBe('done') }) it('drops the diff segment when assistant writes its own ```diff fence', () => { @@ -246,10 +243,9 @@ describe('createGatewayEventHandler', () => { } as any) onEvent({ payload: { text: assistantText }, type: 'message.complete' } as any) - expect(appended).toHaveLength(2) - expect(appended[0]).toMatchObject({ kind: 'trail' }) - expect(appended[1]?.text).toBe(assistantText) - expect((appended[1]?.text.match(/```diff/g) ?? []).length).toBe(1) + expect(appended).toHaveLength(1) + expect(appended[0]?.text).toBe(assistantText) + expect((appended[0]?.text.match(/```diff/g) ?? []).length).toBe(1) }) it('keeps tool trail terse when inline_diff is present', () => { @@ -265,15 +261,13 @@ describe('createGatewayEventHandler', () => { // Tool row is now placed before the diff, so telemetry does not render // below the patch that came from that tool. - expect(appended).toHaveLength(3) - expect(appended[0]?.kind).toBe('trail') + expect(appended).toHaveLength(2) + expect(appended[0]?.kind).toBe('diff') + expect(appended[0]?.text).toContain('```diff') expect(appended[0]?.tools?.[0]).toContain('Review Diff') expect(appended[0]?.tools?.[0]).not.toContain('--- a/foo.ts') - expect(appended[1]?.kind).toBe('diff') - expect(appended[1]?.text).toContain('```diff') + expect(appended[1]?.text).toBe('done') expect(appended[1]?.tools ?? []).toEqual([]) - expect(appended[2]?.text).toBe('done') - expect(appended[2]?.tools ?? 
[]).toEqual([]) }) it('shows setup panel for missing provider startup error', () => { diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index 502f5387..4e51c032 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -380,18 +380,14 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: ev.payload.inline_diff && getUiState().inlineDiffs ? stripAnsi(String(ev.payload.inline_diff)).trim() : '' if (inlineDiffText) { - turnController.flushStreamingSegment() - } - - turnController.recordToolComplete( - ev.payload.tool_id, - ev.payload.name, - ev.payload.error, - inlineDiffText ? '' : ev.payload.summary - ) - - if (inlineDiffText) { - turnController.pushInlineDiffSegment(inlineDiffText) + turnController.recordInlineDiffToolComplete( + inlineDiffText, + ev.payload.tool_id, + ev.payload.name, + ev.payload.error + ) + } else { + turnController.recordToolComplete(ev.payload.tool_id, ev.payload.name, ev.payload.error, ev.payload.summary) } return diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 1269409d..0dadbfbc 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -220,7 +220,7 @@ class TurnController { }, REASONING_PULSE_MS) } - pushInlineDiffSegment(diffText: string) { + pushInlineDiffSegment(diffText: string, tools: string[] = []) { // Strip CLI chrome the gateway emits before the unified diff (e.g. a // leading "┊ review diff" header written by `_emit_inline_diff` for the // terminal printer). 
That header only makes sense as stdout dressing, @@ -247,7 +247,7 @@ class TurnController { return } - this.segmentMessages = [...this.segmentMessages, { kind: 'diff', role: 'assistant', text: block }] + this.segmentMessages = [...this.segmentMessages, { kind: 'diff', role: 'assistant', text: block, ...(tools.length && { tools }) }] patchTurnState({ streamSegments: this.segmentMessages }) } @@ -397,13 +397,25 @@ class TurnController { } recordToolComplete(toolId: string, fallbackName?: string, error?: string, summary?: string) { + const line = this.completeTool(toolId, fallbackName, error, summary) + + this.pendingSegmentTools = [...this.pendingSegmentTools, line] + this.publishToolState() + } + + recordInlineDiffToolComplete(diffText: string, toolId: string, fallbackName?: string, error?: string) { + this.flushStreamingSegment() + this.pushInlineDiffSegment(diffText, [this.completeTool(toolId, fallbackName, error, '')]) + this.publishToolState() + } + + private completeTool(toolId: string, fallbackName?: string, error?: string, summary?: string) { const done = this.activeTools.find(tool => tool.id === toolId) const name = done?.name ?? fallbackName ?? 
'tool' const label = toolTrailLabel(name) const line = buildToolTrailLine(name, done?.context || '', Boolean(error), error || summary || '') this.activeTools = this.activeTools.filter(tool => tool.id !== toolId) - this.pendingSegmentTools = [...this.pendingSegmentTools, line] const next = this.turnTools.filter(item => !sameToolTrailGroup(label, item)) @@ -412,6 +424,11 @@ class TurnController { } this.turnTools = next.slice(-TRAIL_LIMIT) + + return line + } + + private publishToolState() { patchTurnState({ streamPendingTools: this.pendingSegmentTools, tools: this.activeTools, From 6814646b364aa37c27734faf37339770d1889123 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 10:58:18 -0500 Subject: [PATCH 23/87] fix(tui): avoid duplicating flushed stream text --- .../createGatewayEventHandler.test.ts | 16 ++++++++++++++++ ui-tui/src/app/turnController.ts | 18 +++++++++++++++++- 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 7e0cddfe..4f7ccdb7 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -198,6 +198,22 @@ describe('createGatewayEventHandler', () => { expect(appended[3]?.text).not.toContain('```diff') }) + it('keeps full final responses from duplicating flushed pre-diff narration', () => { + const appended: Msg[] = [] + const onEvent = createGatewayEventHandler(buildCtx(appended)) + const diff = '--- a/foo.ts\n+++ b/foo.ts\n@@\n-old\n+new' + const block = `\`\`\`diff\n${diff}\n\`\`\`` + + onEvent({ payload: { text: 'Before edit. ' }, type: 'message.delta' } as any) + onEvent({ payload: { context: 'foo.ts', name: 'patch', tool_id: 'tool-1' }, type: 'tool.start' } as any) + onEvent({ payload: { inline_diff: diff, summary: 'patched', tool_id: 'tool-1' }, type: 'tool.complete' } as any) + onEvent({ payload: { text: 'After edit.' 
}, type: 'message.delta' } as any) + onEvent({ payload: { text: 'Before edit. After edit.' }, type: 'message.complete' } as any) + + expect(appended.map(msg => msg.text.trim()).filter(Boolean)).toEqual(['Before edit.', block, 'After edit.']) + expect(appended[1]?.tools?.[0]).toContain('Patch') + }) + it('drops the diff segment when the final assistant text narrates the same diff', () => { const appended: Msg[] = [] const onEvent = createGatewayEventHandler(buildCtx(appended)) diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 0dadbfbc..540d3793 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -40,6 +40,22 @@ const diffSegmentBody = (msg: Msg): null | string => { const hasDetails = (msg: Msg): boolean => Boolean(msg.thinking || msg.tools?.length || msg.toolTokens) +const textSegments = (segments: Msg[]) => segments.filter(msg => msg.role === 'assistant' && msg.kind !== 'diff').map(msg => msg.text) + +const finalTail = (finalText: string, segments: Msg[]) => { + let tail = finalText + + for (const text of textSegments(segments)) { + const trimmed = text.trim() + + if (trimmed && tail.startsWith(trimmed)) { + tail = tail.slice(trimmed.length).trimStart() + } + } + + return tail +} + export interface InterruptDeps { appendMessage: (msg: Msg) => void gw: { request: (method: string, params?: Record) => Promise } @@ -294,7 +310,7 @@ class TurnController { recordMessageComplete(payload: { rendered?: string; reasoning?: string; text?: string }) { const rawText = (payload.rendered ?? payload.text ?? this.bufRef).trimStart() const split = splitReasoning(rawText) - const finalText = split.text + const finalText = finalTail(split.text, this.segmentMessages) const existingReasoning = this.reasoningText.trim() || String(payload.reasoning ?? '').trim() const savedReasoning = [existingReasoning, existingReasoning ? 
'' : split.reasoning].filter(Boolean).join('\n\n') const savedToolTokens = this.toolTokenAcc From a8fcd1c742f459a6d8ed506786f48e406d481b1e Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 13:30:08 -0500 Subject: [PATCH 24/87] fix(tui): apply details mode live --- ui-tui/packages/hermes-ink/src/ink/ink.tsx | 2 +- .../src/__tests__/createSlashHandler.test.ts | 1 + ui-tui/src/__tests__/details.test.ts | 10 ++++++++-- ui-tui/src/app/interfaces.ts | 1 + ui-tui/src/app/slash/commands/core.ts | 4 ++-- ui-tui/src/app/uiStore.ts | 1 + ui-tui/src/app/useConfigSync.ts | 1 + ui-tui/src/app/useMainApp.ts | 6 ++++-- ui-tui/src/components/appLayout.tsx | 8 ++++++++ ui-tui/src/components/messageLine.tsx | 10 +++++++--- ui-tui/src/components/thinking.tsx | 12 +++++++----- ui-tui/src/domain/details.ts | 19 +++++++++++++++---- 12 files changed, 56 insertions(+), 19 deletions(-) diff --git a/ui-tui/packages/hermes-ink/src/ink/ink.tsx b/ui-tui/packages/hermes-ink/src/ink/ink.tsx index d05b743a..ff6570f8 100644 --- a/ui-tui/packages/hermes-ink/src/ink/ink.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/ink.tsx @@ -62,10 +62,10 @@ import { getSelectedText, hasSelection, moveFocus, + selectionBounds, type SelectionState, selectLineAt, selectWordAt, - selectionBounds, shiftAnchor, shiftSelection, shiftSelectionForFollow, diff --git a/ui-tui/src/__tests__/createSlashHandler.test.ts b/ui-tui/src/__tests__/createSlashHandler.test.ts index 32c92c00..a51c89c5 100644 --- a/ui-tui/src/__tests__/createSlashHandler.test.ts +++ b/ui-tui/src/__tests__/createSlashHandler.test.ts @@ -119,6 +119,7 @@ describe('createSlashHandler', () => { expect(getUiState().detailsMode).toBe('collapsed') expect(createSlashHandler(ctx)('/details toggle')).toBe(true) expect(getUiState().detailsMode).toBe('expanded') + expect(getUiState().detailsModeCommandOverride).toBe(true) expect(ctx.gateway.rpc).toHaveBeenCalledWith('config.set', { key: 'details_mode', value: 'expanded' diff --git 
a/ui-tui/src/__tests__/details.test.ts b/ui-tui/src/__tests__/details.test.ts index 0f567b2f..04a1fca9 100644 --- a/ui-tui/src/__tests__/details.test.ts +++ b/ui-tui/src/__tests__/details.test.ts @@ -78,19 +78,25 @@ describe('sectionMode', () => { expect(sectionMode('subagents', 'hidden', {})).toBe('hidden') }) - it('streams thinking + tools expanded by default regardless of global mode', () => { + it('streams thinking + tools expanded by default for persisted config values', () => { expect(sectionMode('thinking', 'collapsed', {})).toBe('expanded') expect(sectionMode('thinking', 'hidden', undefined)).toBe('expanded') expect(sectionMode('tools', 'collapsed', {})).toBe('expanded') expect(sectionMode('tools', 'hidden', undefined)).toBe('expanded') }) - it('hides the activity panel by default regardless of global mode', () => { + it('hides the activity panel by default for persisted config values', () => { expect(sectionMode('activity', 'collapsed', {})).toBe('hidden') expect(sectionMode('activity', 'expanded', undefined)).toBe('hidden') expect(sectionMode('activity', 'hidden', {})).toBe('hidden') }) + it('applies in-session /details mode globally over built-in defaults', () => { + expect(sectionMode('thinking', 'collapsed', {}, true)).toBe('collapsed') + expect(sectionMode('tools', 'hidden', {}, true)).toBe('hidden') + expect(sectionMode('activity', 'expanded', undefined, true)).toBe('expanded') + }) + it('honours per-section overrides over both the section default and global mode', () => { expect(sectionMode('thinking', 'collapsed', { thinking: 'collapsed' })).toBe('collapsed') expect(sectionMode('tools', 'collapsed', { tools: 'hidden' })).toBe('hidden') diff --git a/ui-tui/src/app/interfaces.ts b/ui-tui/src/app/interfaces.ts index 032eee87..34919aca 100644 --- a/ui-tui/src/app/interfaces.ts +++ b/ui-tui/src/app/interfaces.ts @@ -90,6 +90,7 @@ export interface UiState { busy: boolean compact: boolean detailsMode: DetailsMode + detailsModeCommandOverride: boolean 
info: null | SessionInfo inlineDiffs: boolean mouseTracking: boolean diff --git a/ui-tui/src/app/slash/commands/core.ts b/ui-tui/src/app/slash/commands/core.ts index 6d927fed..70804a1f 100644 --- a/ui-tui/src/app/slash/commands/core.ts +++ b/ui-tui/src/app/slash/commands/core.ts @@ -184,7 +184,7 @@ export const coreCommands: SlashCommand[] = [ } const mode = parseDetailsMode(r?.value) ?? ui.detailsMode - patchUiState({ detailsMode: mode }) + patchUiState({ detailsMode: mode, detailsModeCommandOverride: false }) const overrides = SECTION_NAMES.filter(s => ui.sections[s]) .map(s => `${s}=${ui.sections[s]}`) @@ -224,7 +224,7 @@ export const coreCommands: SlashCommand[] = [ return transcript.sys(DETAILS_USAGE) } - patchUiState({ detailsMode: next }) + patchUiState({ detailsMode: next, detailsModeCommandOverride: true }) gateway.rpc('config.set', { key: 'details_mode', value: next }).catch(() => {}) transcript.sys(`details: ${next}`) } diff --git a/ui-tui/src/app/uiStore.ts b/ui-tui/src/app/uiStore.ts index fc17a694..1b3a841e 100644 --- a/ui-tui/src/app/uiStore.ts +++ b/ui-tui/src/app/uiStore.ts @@ -11,6 +11,7 @@ const buildUiState = (): UiState => ({ busy: false, compact: false, detailsMode: 'collapsed', + detailsModeCommandOverride: false, info: null, inlineDiffs: true, mouseTracking: MOUSE_TRACKING, diff --git a/ui-tui/src/app/useConfigSync.ts b/ui-tui/src/app/useConfigSync.ts index 3ceb8c63..26d02d62 100644 --- a/ui-tui/src/app/useConfigSync.ts +++ b/ui-tui/src/app/useConfigSync.ts @@ -45,6 +45,7 @@ export const applyDisplay = (cfg: ConfigFullResponse | null, setBell: (v: boolea patchUiState({ compact: !!d.tui_compact, detailsMode: resolveDetailsMode(d), + detailsModeCommandOverride: false, inlineDiffs: d.inline_diffs !== false, mouseTracking: d.tui_mouse !== false, sections: resolveSections(d.sections), diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index d46744a0..6e07f8f8 100644 --- a/ui-tui/src/app/useMainApp.ts +++ 
b/ui-tui/src/app/useMainApp.ts @@ -26,6 +26,7 @@ import { createGatewayEventHandler } from './createGatewayEventHandler.js' import { createSlashHandler } from './createSlashHandler.js' import { type GatewayRpc, type TranscriptRow } from './interfaces.js' import { $overlayState, patchOverlayState } from './overlayStore.js' +import { scrollWithSelectionBy } from './scroll.js' import { turnController } from './turnController.js' import { $turnState, patchTurnState } from './turnStore.js' import { $uiState, getUiState, patchUiState } from './uiStore.js' @@ -33,7 +34,6 @@ import { useComposerState } from './useComposerState.js' import { useConfigSync } from './useConfigSync.js' import { useInputHandlers } from './useInputHandlers.js' import { useLongRunToolCharms } from './useLongRunToolCharms.js' -import { scrollWithSelectionBy } from './scroll.js' import { useSessionLifecycle } from './useSessionLifecycle.js' import { useSubmission } from './useSubmission.js' @@ -593,7 +593,9 @@ export function useMainApp(gw: GatewayClient) { // resolved to hidden, the only thing ToolTrail will surface is the // floating-alert backstop (errors/warnings). Mirror that so we don't // render an empty wrapper Box above the streaming area in quiet mode. - const anyPanelVisible = SECTION_NAMES.some(s => sectionMode(s, ui.detailsMode, ui.sections) !== 'hidden') + const anyPanelVisible = SECTION_NAMES.some( + s => sectionMode(s, ui.detailsMode, ui.sections, ui.detailsModeCommandOverride) !== 'hidden' + ) const showProgressArea = anyPanelVisible ? 
Boolean( diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 170d0649..b302fed6 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -25,6 +25,7 @@ const StreamingAssistant = memo(function StreamingAssistant({ cols, compact, detailsMode, + detailsModeCommandOverride, progress, sections, t @@ -40,6 +41,7 @@ const StreamingAssistant = memo(function StreamingAssistant({ cols={cols} compact={compact} detailsMode={detailsMode} + detailsModeCommandOverride={detailsModeCommandOverride} key={`seg:${i}`} msg={msg} sections={sections} @@ -52,6 +54,7 @@ const StreamingAssistant = memo(function StreamingAssistant({ From 015f6c825df2e877af5d9811b1ff016d188ab551 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 13:52:54 -0500 Subject: [PATCH 27/87] fix(tui): support modified enter for multiline input --- .../src/ink/events/cmd-shortcuts.test.ts | 20 +++++++++++++++++- .../hermes-ink/src/ink/events/input-event.ts | 21 +++++++++++++------ .../packages/hermes-ink/src/ink/terminal.ts | 2 +- ui-tui/src/components/textInput.tsx | 2 +- 4 files changed, 36 insertions(+), 9 deletions(-) diff --git a/ui-tui/packages/hermes-ink/src/ink/events/cmd-shortcuts.test.ts b/ui-tui/packages/hermes-ink/src/ink/events/cmd-shortcuts.test.ts index 1abd7bbe..250b262e 100644 --- a/ui-tui/packages/hermes-ink/src/ink/events/cmd-shortcuts.test.ts +++ b/ui-tui/packages/hermes-ink/src/ink/events/cmd-shortcuts.test.ts @@ -11,7 +11,25 @@ function parseOne(sequence: string) { return keys[0]! 
} -describe('InputEvent macOS command modifiers', () => { +describe('enhanced keyboard modifier parsing', () => { + it('detects modified Enter sequences for multiline composer shortcuts', () => { + const shiftEnter = new InputEvent(parseOne('\u001b[13;2u')) + const ctrlEnter = new InputEvent(parseOne('\u001b[13;5u')) + const modifyOtherShiftEnter = new InputEvent(parseOne('\u001b[27;2;13~')) + + expect(shiftEnter.key.return).toBe(true) + expect(shiftEnter.key.shift).toBe(true) + expect(shiftEnter.input).toBe('') + + expect(ctrlEnter.key.return).toBe(true) + expect(ctrlEnter.key.ctrl).toBe(true) + expect(ctrlEnter.input).toBe('') + + expect(modifyOtherShiftEnter.key.return).toBe(true) + expect(modifyOtherShiftEnter.key.shift).toBe(true) + expect(modifyOtherShiftEnter.input).toBe('') + }) + it('preserves Cmd as super for kitty keyboard CSI-u sequences', () => { const parsed = parseOne('\u001b[99;9u') const event = new InputEvent(parsed) diff --git a/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts b/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts index 293ecdbe..a3cd3fab 100644 --- a/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts +++ b/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts @@ -116,11 +116,15 @@ function parseKey(keypress: ParsedKey): [Key, string] { // so the raw "[57358u" doesn't leak into the prompt. See #38781. input = '' } else { - // 'space' → ' '; 'escape' → '' (key.escape carries it; - // processedAsSpecialSequence bypasses the nonAlphanumericKeys - // clear below, so we must handle it explicitly here); - // otherwise use key name. - input = keypress.name === 'space' ? ' ' : keypress.name === 'escape' ? '' : keypress.name + // 'space' → ' '; functional keys like Enter/Escape carry their state + // through key.return/key.escape, and processedAsSpecialSequence bypasses + // the nonAlphanumericKeys clear below, so clear them explicitly here. + input = + keypress.name === 'space' + ? 
' ' + : keypress.name === 'return' || keypress.name === 'escape' + ? '' + : keypress.name } processedAsSpecialSequence = true @@ -138,7 +142,12 @@ function parseKey(keypress: ParsedKey): [Key, string] { // guards against future terminal behavior. input = '' } else { - input = keypress.name === 'space' ? ' ' : keypress.name === 'escape' ? '' : keypress.name + input = + keypress.name === 'space' + ? ' ' + : keypress.name === 'return' || keypress.name === 'escape' + ? '' + : keypress.name } processedAsSpecialSequence = true diff --git a/ui-tui/packages/hermes-ink/src/ink/terminal.ts b/ui-tui/packages/hermes-ink/src/ink/terminal.ts index 8bdac622..75637c76 100644 --- a/ui-tui/packages/hermes-ink/src/ink/terminal.ts +++ b/ui-tui/packages/hermes-ink/src/ink/terminal.ts @@ -176,7 +176,7 @@ export function isXtermJs(): boolean { // in xterm.js-based terminals like VS Code). tmux is allowlisted because it // accepts modifyOtherKeys and doesn't forward the kitty sequence to the outer // terminal. -const EXTENDED_KEYS_TERMINALS = ['iTerm.app', 'kitty', 'WezTerm', 'ghostty', 'tmux', 'windows-terminal'] +const EXTENDED_KEYS_TERMINALS = ['iTerm.app', 'kitty', 'WezTerm', 'ghostty', 'tmux', 'windows-terminal', 'vscode'] /** True if this terminal correctly handles extended key reporting * (Kitty keyboard protocol + xterm modifyOtherKeys). */ diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index b31f86e7..984d2178 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -700,7 +700,7 @@ export function TextInput({ } if (k.return) { - if (k.shift || (isMac ? isActionMod(k) : k.meta)) { + if (k.shift || k.ctrl || (isMac ? 
isActionMod(k) : k.meta)) { flushParentChange() commit(ins(vRef.current, curRef.current, '\n'), curRef.current + 1) } else { From 2be5e181a987d7e07cba55c5d8b0e1a17597c0e7 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 13:54:12 -0500 Subject: [PATCH 28/87] fix(tui): keep thinking color theme-neutral --- ui-tui/src/components/thinking.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ui-tui/src/components/thinking.tsx b/ui-tui/src/components/thinking.tsx index 604b71eb..b8436fc4 100644 --- a/ui-tui/src/components/thinking.tsx +++ b/ui-tui/src/components/thinking.tsx @@ -646,22 +646,22 @@ export const Thinking = memo(function Thinking({ {preview ? ( mode === 'full' ? ( lines.map((line, index) => ( - + {line || ' '} {index === lines.length - 1 ? ( - + ) : null} )) ) : ( - + {preview} - + ) ) : ( - - + + )} From a8bfe72d359d4e049984dd26a27185ce1e63e64c Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 13:56:26 -0500 Subject: [PATCH 29/87] fix(tui): address latest review feedback --- ui-tui/src/__tests__/interactionMode.test.ts | 28 ----------- ui-tui/src/app/interactionMode.ts | 52 -------------------- ui-tui/src/app/useSubmission.ts | 4 +- ui-tui/src/config/timing.ts | 1 - ui-tui/src/lib/viewportStore.ts | 19 +++++-- 5 files changed, 17 insertions(+), 87 deletions(-) delete mode 100644 ui-tui/src/__tests__/interactionMode.test.ts delete mode 100644 ui-tui/src/app/interactionMode.ts diff --git a/ui-tui/src/__tests__/interactionMode.test.ts b/ui-tui/src/__tests__/interactionMode.test.ts deleted file mode 100644 index 1a44519d..00000000 --- a/ui-tui/src/__tests__/interactionMode.test.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { afterEach, describe, expect, it, vi } from 'vitest' - -import { getInteractionMode, markScrolling, markTyping, resetInteractionMode } from '../app/interactionMode.js' -import { SCROLLING_IDLE_MS, TYPING_IDLE_MS } from '../config/timing.js' - 
-describe('interactionMode', () => { - afterEach(() => { - resetInteractionMode() - vi.useRealTimers() - }) - - it('holds scrolling mode briefly then returns idle', () => { - vi.useFakeTimers() - markScrolling() - expect(getInteractionMode()).toBe('scrolling') - vi.advanceTimersByTime(SCROLLING_IDLE_MS) - expect(getInteractionMode()).toBe('idle') - }) - - it('typing takes priority over scrolling', () => { - vi.useFakeTimers() - markTyping() - markScrolling() - expect(getInteractionMode()).toBe('typing') - vi.advanceTimersByTime(TYPING_IDLE_MS) - expect(getInteractionMode()).toBe('idle') - }) -}) diff --git a/ui-tui/src/app/interactionMode.ts b/ui-tui/src/app/interactionMode.ts deleted file mode 100644 index f18033f8..00000000 --- a/ui-tui/src/app/interactionMode.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { SCROLLING_IDLE_MS, TYPING_IDLE_MS } from '../config/timing.js' - -export type InteractionMode = 'idle' | 'scrolling' | 'typing' - -type Timer = null | ReturnType - -let mode: InteractionMode = 'idle' -let scrollingTimer: Timer = null -let typingTimer: Timer = null - -const clear = (t: Timer): null => { - if (t) { - clearTimeout(t) - } - - return null -} - -export function getInteractionMode(): InteractionMode { - return mode -} - -export function markTyping(): void { - mode = 'typing' - typingTimer = clear(typingTimer) - scrollingTimer = clear(scrollingTimer) - typingTimer = setTimeout(() => { - typingTimer = null - mode = 'idle' - }, TYPING_IDLE_MS) -} - -export function markScrolling(): void { - if (mode === 'typing') { - return - } - - mode = 'scrolling' - scrollingTimer = clear(scrollingTimer) - scrollingTimer = setTimeout(() => { - scrollingTimer = null - if (mode === 'scrolling') { - mode = 'idle' - } - }, SCROLLING_IDLE_MS) -} - -export function resetInteractionMode(): void { - scrollingTimer = clear(scrollingTimer) - typingTimer = clear(typingTimer) - mode = 'idle' -} diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 
70a3faf3..6a585bd6 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -101,10 +101,10 @@ export function useSubmission(opts: UseSubmissionOptions) { gw.request('prompt.submit', { session_id: sid, text: submitText }).catch((e: Error) => { if (isSessionBusyError(e)) { - composerActions.enqueue(text) + composerActions.enqueue(submitText) patchUiState({ busy: true, status: 'queued for next turn' }) - return sys(`queued: "${text.slice(0, 50)}${text.length > 50 ? '…' : ''}"`) + return sys(`queued: "${submitText.slice(0, 50)}${submitText.length > 50 ? '…' : ''}"`) } sys(`error: ${e.message}`) diff --git a/ui-tui/src/config/timing.ts b/ui-tui/src/config/timing.ts index d428bacf..e0bd611b 100644 --- a/ui-tui/src/config/timing.ts +++ b/ui-tui/src/config/timing.ts @@ -2,5 +2,4 @@ export const STREAM_BATCH_MS = 16 export const STREAM_IDLE_BATCH_MS = 16 export const STREAM_TYPING_BATCH_MS = 80 export const TYPING_IDLE_MS = 250 -export const SCROLLING_IDLE_MS = 450 export const REASONING_PULSE_MS = 700 diff --git a/ui-tui/src/lib/viewportStore.ts b/ui-tui/src/lib/viewportStore.ts index 298d094b..0a52e99a 100644 --- a/ui-tui/src/lib/viewportStore.ts +++ b/ui-tui/src/lib/viewportStore.ts @@ -1,6 +1,6 @@ import type { ScrollBoxHandle } from '@hermes/ink' import type { RefObject } from 'react' -import { useCallback, useSyncExternalStore } from 'react' +import { useCallback, useMemo, useSyncExternalStore } from 'react' export interface ViewportSnapshot { atBottom: boolean @@ -45,6 +45,19 @@ export function viewportSnapshotKey(v: ViewportSnapshot) { return `${v.atBottom ? 
1 : 0}:${v.top}:${v.viewportHeight}:${v.scrollHeight}:${v.pending}` } +const snapshotFromKey = (key: string): ViewportSnapshot => { + const [atBottom = '1', top = '0', viewportHeight = '0', scrollHeight = '0', pending = '0'] = key.split(':') + + return { + atBottom: atBottom === '1', + bottom: Number(top) + Number(viewportHeight), + pending: Number(pending), + scrollHeight: Number(scrollHeight), + top: Number(top), + viewportHeight: Number(viewportHeight) + } +} + export function useViewportSnapshot(scrollRef: RefObject): ViewportSnapshot { const key = useSyncExternalStore( useCallback((cb: () => void) => scrollRef.current?.subscribe(cb) ?? (() => {}), [scrollRef]), @@ -52,7 +65,5 @@ export function useViewportSnapshot(scrollRef: RefObject () => viewportSnapshotKey(EMPTY) ) - void key - - return getViewportSnapshot(scrollRef.current) + return useMemo(() => snapshotFromKey(key), [key]) } From c9f7b703ddb1971acb573bfe6a8890584e9442be Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 13:59:56 -0500 Subject: [PATCH 30/87] fix(tui): filter thinking status noise --- .../createGatewayEventHandler.test.ts | 14 +++++++ ui-tui/src/__tests__/reasoning.test.ts | 11 ++++++ ui-tui/src/lib/text.ts | 37 ++++++++++++++++++- 3 files changed, 61 insertions(+), 1 deletion(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 1114c716..27c49b0d 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -150,6 +150,20 @@ describe('createGatewayEventHandler', () => { expect(appended[appended.length - 1]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) + it('filters spinner/status-only reasoning noise from completed thinking', () => { + const appended: Msg[] = [] + const streamed = '(¬_¬) synthesizing...\nactual plan\n( ͡° ͜ʖ ͡°) pondering...\nnext step' + + const onEvent = 
createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: { text: streamed }, type: 'reasoning.delta' } as any) + onEvent({ payload: { text: 'final answer' }, type: 'message.complete' } as any) + + expect(appended[0]?.thinking).toBe(streamed) + expect(appended[0]?.text).toBe('') + expect(appended[appended.length - 1]).toMatchObject({ role: 'assistant', text: 'final answer' }) + }) + it('ignores fallback reasoning.available when streamed reasoning already exists', () => { const appended: Msg[] = [] const streamed = 'short streamed reasoning' diff --git a/ui-tui/src/__tests__/reasoning.test.ts b/ui-tui/src/__tests__/reasoning.test.ts index c961ea7a..d14a0a29 100644 --- a/ui-tui/src/__tests__/reasoning.test.ts +++ b/ui-tui/src/__tests__/reasoning.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from 'vitest' import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' +import { cleanThinkingText } from '../lib/text.js' describe('splitReasoning', () => { it('extracts and strips it from text', () => { @@ -48,3 +49,13 @@ describe('splitReasoning', () => { expect(hasReasoningTag('no tags at all')).toBe(false) }) }) + +describe('cleanThinkingText', () => { + it('removes face/status ticker fragments while preserving real reasoning', () => { + expect( + cleanThinkingText( + '(¬_¬) synthesizing...**Resolving comments on GitHub**\n( ͡° ͜ʖ ͡°) musing...\nActual step\n٩(๑❛ᴗ❛๑)۶ contemplating...next step' + ) + ).toBe('**Resolving comments on GitHub**\nActual step\nnext step') + }) +}) diff --git a/ui-tui/src/lib/text.ts b/ui-tui/src/lib/text.ts index 8541ac3f..18d5a5a6 100644 --- a/ui-tui/src/lib/text.ts +++ b/ui-tui/src/lib/text.ts @@ -70,8 +70,43 @@ export const pasteTokenLabel = (text: string, lineCount: number) => { : `[[ ${preview} [${fmtK(lineCount)} lines] ]]` } +const THINKING_STATUS_WORDS = [ + 'pondering', + 'contemplating', + 'musing', + 'cogitating', + 'ruminating', + 'deliberating', + 'mulling', + 'reflecting', + 'processing', + 
'reasoning', + 'analyzing', + 'computing', + 'synthesizing', + 'formulating', + 'brainstorming' +] + +const THINKING_STATUS_RE = new RegExp(`^(?:${THINKING_STATUS_WORDS.join('|')})\\.{0,3}$`, 'i') + +const THINKING_FACE_SOURCE = '[^A-Za-z\n]+' + +const THINKING_STATUS_CHUNK_RE = new RegExp( + `${THINKING_FACE_SOURCE}\\s*(?:${THINKING_STATUS_WORDS.join('|')})\\.{0,3}\\s*`, + 'giu' +) + +export const cleanThinkingText = (reasoning: string) => + reasoning + .split('\n') + .map(line => line.replace(THINKING_STATUS_CHUNK_RE, '').trim()) + .filter(line => line && !THINKING_STATUS_RE.test(line.replace(/\.\.\.$/, '').trim())) + .join('\n') + .trim() + export const thinkingPreview = (reasoning: string, mode: ThinkingMode, max: number = THINKING_COT_MAX) => { - const raw = reasoning.trim() + const raw = cleanThinkingText(reasoning) return !raw || mode === 'collapsed' ? '' : mode === 'full' ? raw : compactPreview(raw.replace(WS_RE, ' '), max) } From a30ffbe1d4498505a5bebda2960ec16053a9c7fb Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 14:01:14 -0500 Subject: [PATCH 31/87] fix(tui): show queued prompts when drained --- ui-tui/src/app/useSubmission.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 6a585bd6..046b2316 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -199,9 +199,9 @@ export function useSubmission(opts: UseSubmissionOptions) { return interpolate(text, send) } - send(text, composerRefs.queueRef.current.length === 0) + send(text) }, - [composerRefs, interpolate, send, shellExec] + [interpolate, send, shellExec] ) const dispatchSubmission = useCallback( From 7b5b524fc71e210ebe778c22234518ea3ef40588 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 14:03:36 -0500 Subject: [PATCH 32/87] refactor(tui): clean thinking and viewport helpers --- ui-tui/src/lib/text.ts | 29 
+++-------------------------- ui-tui/src/lib/viewportStore.ts | 26 ++++++++++++-------------- 2 files changed, 15 insertions(+), 40 deletions(-) diff --git a/ui-tui/src/lib/text.ts b/ui-tui/src/lib/text.ts index 18d5a5a6..9407c8fa 100644 --- a/ui-tui/src/lib/text.ts +++ b/ui-tui/src/lib/text.ts @@ -1,4 +1,5 @@ import { THINKING_COT_MAX } from '../config/limits.js' +import { VERBS } from '../content/verbs.js' import type { ThinkingMode } from '../types.js' const ESC = String.fromCharCode(27) @@ -70,32 +71,8 @@ export const pasteTokenLabel = (text: string, lineCount: number) => { : `[[ ${preview} [${fmtK(lineCount)} lines] ]]` } -const THINKING_STATUS_WORDS = [ - 'pondering', - 'contemplating', - 'musing', - 'cogitating', - 'ruminating', - 'deliberating', - 'mulling', - 'reflecting', - 'processing', - 'reasoning', - 'analyzing', - 'computing', - 'synthesizing', - 'formulating', - 'brainstorming' -] - -const THINKING_STATUS_RE = new RegExp(`^(?:${THINKING_STATUS_WORDS.join('|')})\\.{0,3}$`, 'i') - -const THINKING_FACE_SOURCE = '[^A-Za-z\n]+' - -const THINKING_STATUS_CHUNK_RE = new RegExp( - `${THINKING_FACE_SOURCE}\\s*(?:${THINKING_STATUS_WORDS.join('|')})\\.{0,3}\\s*`, - 'giu' -) +const THINKING_STATUS_RE = new RegExp(`^(?:${VERBS.join('|')})\\.{0,3}$`, 'i') +const THINKING_STATUS_CHUNK_RE = new RegExp(`[^A-Za-z\n]+\\s*(?:${VERBS.join('|')})\\.{0,3}\\s*`, 'giu') export const cleanThinkingText = (reasoning: string) => reasoning diff --git a/ui-tui/src/lib/viewportStore.ts b/ui-tui/src/lib/viewportStore.ts index 0a52e99a..58e24ab8 100644 --- a/ui-tui/src/lib/viewportStore.ts +++ b/ui-tui/src/lib/viewportStore.ts @@ -45,19 +45,6 @@ export function viewportSnapshotKey(v: ViewportSnapshot) { return `${v.atBottom ? 
1 : 0}:${v.top}:${v.viewportHeight}:${v.scrollHeight}:${v.pending}` } -const snapshotFromKey = (key: string): ViewportSnapshot => { - const [atBottom = '1', top = '0', viewportHeight = '0', scrollHeight = '0', pending = '0'] = key.split(':') - - return { - atBottom: atBottom === '1', - bottom: Number(top) + Number(viewportHeight), - pending: Number(pending), - scrollHeight: Number(scrollHeight), - top: Number(top), - viewportHeight: Number(viewportHeight) - } -} - export function useViewportSnapshot(scrollRef: RefObject): ViewportSnapshot { const key = useSyncExternalStore( useCallback((cb: () => void) => scrollRef.current?.subscribe(cb) ?? (() => {}), [scrollRef]), @@ -65,5 +52,16 @@ export function useViewportSnapshot(scrollRef: RefObject () => viewportSnapshotKey(EMPTY) ) - return useMemo(() => snapshotFromKey(key), [key]) + return useMemo(() => { + const [atBottom = '1', top = '0', viewportHeight = '0', scrollHeight = '0', pending = '0'] = key.split(':') + + return { + atBottom: atBottom === '1', + bottom: Number(top) + Number(viewportHeight), + pending: Number(pending), + scrollHeight: Number(scrollHeight), + top: Number(top), + viewportHeight: Number(viewportHeight) + } + }, [key]) } From 3d21f97422cd51153bd0c073f189ae07aa1830b1 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 14:06:42 -0500 Subject: [PATCH 33/87] fix(tui): keep live tool state before stream segments --- ui-tui/src/components/appLayout.tsx | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index b302fed6..c4739c05 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -36,21 +36,8 @@ const StreamingAssistant = memo(function StreamingAssistant({ return ( <> - {progress.streamSegments.map((msg, i) => ( - - ))} - {progress.showProgressArea && ( - + )} + {progress.streamSegments.map((msg, i) => ( + + ))} + 
{progress.showStreamingArea && ( Date: Sun, 26 Apr 2026 14:12:43 -0500 Subject: [PATCH 34/87] refactor(tui): render progress in ordered stream timeline --- ui-tui/src/app/turnController.ts | 34 ++++++++++++++++++++++++++++- ui-tui/src/components/appLayout.tsx | 23 ------------------- 2 files changed, 33 insertions(+), 24 deletions(-) diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index ce6cc600..e676fbd3 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -88,6 +88,7 @@ class TurnController { turnTools: string[] = [] private activeTools: ActiveTool[] = [] + private reasoningSegmentIndex: null | number = null private activityId = 0 private reasoningStreamingTimer: Timer = null private reasoningTimer: Timer = null @@ -191,6 +192,33 @@ class TurnController { }) } + private syncReasoningSegment() { + const thinking = this.reasoningText.trim() + + if (!thinking) { + return + } + + const msg: Msg = { + kind: 'trail', + role: 'system', + text: '', + thinking, + thinkingTokens: estimateTokensRough(thinking), + toolTokens: this.toolTokenAcc || undefined, + ...(this.pendingSegmentTools.length && { tools: this.pendingSegmentTools }) + } + + if (this.reasoningSegmentIndex === null) { + this.reasoningSegmentIndex = this.segmentMessages.length + this.segmentMessages = [...this.segmentMessages, msg] + } else { + this.segmentMessages = this.segmentMessages.map((item, i) => (i === this.reasoningSegmentIndex ? msg : item)) + } + + patchTurnState({ streamSegments: this.segmentMessages }) + } + flushStreamingSegment() { const raw = this.bufRef.trimStart() const split = raw ? (hasReasoningTag(raw) ? splitReasoning(raw) : { reasoning: '', text: raw }) : { reasoning: '', text: '' } @@ -331,7 +359,8 @@ class TurnController { toolTokens: savedToolTokens || undefined, ...(tools.length && { tools }) } - const finalMessages = hasDetails(finalDetails) ? 
[...segments, finalDetails] : [...segments] + const hasReasoningSegment = this.reasoningSegmentIndex !== null + const finalMessages = hasDetails(finalDetails) && !hasReasoningSegment ? [...segments, finalDetails] : [...segments] if (finalText) { finalMessages.push({ role: 'assistant', text: finalText }) @@ -391,6 +420,7 @@ class TurnController { this.reasoningText = incoming this.scheduleReasoning() + this.syncReasoningSegment() this.pulseReasoningStreaming() } @@ -401,6 +431,7 @@ class TurnController { this.reasoningText += text this.scheduleReasoning() + this.syncReasoningSegment() this.pulseReasoningStreaming() } @@ -485,6 +516,7 @@ class TurnController { this.lastStatusNote = '' this.pendingSegmentTools = [] this.protocolWarned = false + this.reasoningSegmentIndex = null this.segmentMessages = [] this.turnTools = [] this.toolTokenAcc = 0 diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index c4739c05..744f6e73 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -18,7 +18,6 @@ import { Banner, Panel, SessionPanel } from './branding.js' import { MessageLine } from './messageLine.js' import { QueuedMessages } from './queuedMessages.js' import { TextInput } from './textInput.js' -import { ToolTrail } from './thinking.js' const StreamingAssistant = memo(function StreamingAssistant({ busy, @@ -36,28 +35,6 @@ const StreamingAssistant = memo(function StreamingAssistant({ return ( <> - {progress.showProgressArea && ( - - - - )} - {progress.streamSegments.map((msg, i) => ( Date: Sun, 26 Apr 2026 15:16:12 -0500 Subject: [PATCH 35/87] fix(tui): restore resumed transcript lineage --- hermes_state.py | 65 ++++++-- tests/test_hermes_state.py | 29 ++++ tests/test_tui_gateway_server.py | 63 ++++++++ tui_gateway/server.py | 38 ++++- ui-tui/src/__tests__/messages.test.ts | 19 +++ ui-tui/src/__tests__/text.test.ts | 38 ++++- ui-tui/src/app/turnController.ts | 213 +++++++++++++++++++++++--- 
ui-tui/src/components/messageLine.tsx | 17 +- ui-tui/src/components/thinking.tsx | 28 +++- ui-tui/src/config/limits.ts | 2 + ui-tui/src/lib/text.ts | 74 ++++++++- 11 files changed, 537 insertions(+), 49 deletions(-) diff --git a/hermes_state.py b/hermes_state.py index cc403130..3e5914c5 100644 --- a/hermes_state.py +++ b/hermes_state.py @@ -1132,20 +1132,29 @@ class SessionDB: current = child_id return session_id - def get_messages_as_conversation(self, session_id: str) -> List[Dict[str, Any]]: + def get_messages_as_conversation( + self, session_id: str, include_ancestors: bool = False + ) -> List[Dict[str, Any]]: """ Load messages in the OpenAI conversation format (role + content dicts). Used by the gateway to restore conversation history. """ + session_ids = [session_id] + if include_ancestors: + session_ids = self._session_lineage_root_to_tip(session_id) + with self._lock: - cursor = self._conn.execute( - "SELECT role, content, tool_call_id, tool_calls, tool_name, " - "reasoning, reasoning_content, reasoning_details, codex_reasoning_items, " - "codex_message_items " - "FROM messages WHERE session_id = ? ORDER BY timestamp, id", - (session_id,), - ) - rows = cursor.fetchall() + rows = [] + for sid in session_ids: + cursor = self._conn.execute( + "SELECT role, content, tool_call_id, tool_calls, tool_name, " + "reasoning, reasoning_content, reasoning_details, codex_reasoning_items, " + "codex_message_items " + "FROM messages WHERE session_id = ? 
ORDER BY timestamp, id", + (sid,), + ) + rows.extend(cursor.fetchall()) + messages = [] for row in rows: msg = {"role": row["role"], "content": row["content"]} @@ -1185,9 +1194,47 @@ class SessionDB: except (json.JSONDecodeError, TypeError): logger.warning("Failed to deserialize codex_message_items, falling back to None") msg["codex_message_items"] = None + if include_ancestors and self._is_duplicate_replayed_user_message(messages, msg): + continue messages.append(msg) return messages + def _session_lineage_root_to_tip(self, session_id: str) -> List[str]: + if not session_id: + return [session_id] + + chain = [] + current = session_id + seen = set() + with self._lock: + for _ in range(100): + if not current or current in seen: + break + seen.add(current) + chain.append(current) + row = self._conn.execute( + "SELECT parent_session_id FROM sessions WHERE id = ?", + (current,), + ).fetchone() + if row is None: + break + current = row["parent_session_id"] if hasattr(row, "keys") else row[0] + return list(reversed(chain)) or [session_id] + + @staticmethod + def _is_duplicate_replayed_user_message(messages: List[Dict[str, Any]], msg: Dict[str, Any]) -> bool: + if msg.get("role") != "user": + return False + content = msg.get("content") + if not isinstance(content, str) or not content: + return False + for prev in reversed(messages): + if prev.get("role") == "user" and prev.get("content") == content: + return True + if prev.get("role") == "assistant" and (prev.get("content") or prev.get("tool_calls")): + return False + return False + # ========================================================================= # Search # ========================================================================= diff --git a/tests/test_hermes_state.py b/tests/test_hermes_state.py index 868a28c5..05cbcad5 100644 --- a/tests/test_hermes_state.py +++ b/tests/test_hermes_state.py @@ -222,6 +222,35 @@ class TestMessageStorage: assert conv[0] == {"role": "user", "content": "Hello"} assert conv[1] == 
{"role": "assistant", "content": "Hi!"} + def test_get_messages_as_conversation_includes_ancestor_chain(self, db): + db.create_session("root", "tui") + db.append_message("root", role="user", content="first prompt") + db.append_message("root", role="assistant", content="first answer") + db.create_session("child", "tui", parent_session_id="root") + db.append_message("child", role="user", content="second prompt") + db.append_message("child", role="assistant", content="second answer") + + conv = db.get_messages_as_conversation("child", include_ancestors=True) + + assert [m["content"] for m in conv] == [ + "first prompt", + "first answer", + "second prompt", + "second answer", + ] + + def test_get_messages_as_conversation_avoids_repeated_resume_prompts_from_ancestors(self, db): + db.create_session("root", "tui") + db.append_message("root", role="user", content="same prompt") + db.append_message("root", role="user", content="same prompt") + db.append_message("root", role="assistant", content="answer") + db.create_session("child", "tui", parent_session_id="root") + db.append_message("child", role="user", content="next prompt") + + conv = db.get_messages_as_conversation("child", include_ancestors=True) + + assert [m["content"] for m in conv if m["role"] == "user"] == ["same prompt", "next prompt"] + def test_finish_reason_stored(self, db): db.create_session(session_id="s1", source="cli") db.append_message("s1", role="assistant", content="Done", finish_reason="stop") diff --git a/tests/test_tui_gateway_server.py b/tests/test_tui_gateway_server.py index 0fd5cb7d..fef44b40 100644 --- a/tests/test_tui_gateway_server.py +++ b/tests/test_tui_gateway_server.py @@ -59,6 +59,69 @@ def test_write_json_returns_false_on_broken_pipe(monkeypatch): assert server.write_json({"ok": True}) is False +def test_history_to_messages_preserves_tool_calls_for_resume_display(): + history = [ + {"role": "user", "content": "first prompt"}, + { + "role": "assistant", + "content": "", + "tool_calls": [ 
+ { + "id": "call_1", + "function": { + "name": "search_files", + "arguments": json.dumps({"pattern": "resume"}), + }, + } + ], + }, + {"role": "tool", "content": "{}", "tool_call_id": "call_1"}, + {"role": "assistant", "content": "first answer"}, + {"role": "user", "content": "second prompt"}, + ] + + assert server._history_to_messages(history) == [ + {"role": "user", "text": "first prompt"}, + {"context": "resume", "name": "search_files", "role": "tool"}, + {"role": "assistant", "text": "first answer"}, + {"role": "user", "text": "second prompt"}, + ] + + +def test_session_resume_uses_parent_lineage_for_display(monkeypatch): + captured = {} + + class FakeDB: + def get_session(self, target): + return {"id": target} + + def reopen_session(self, target): + captured["reopened"] = target + + def get_messages_as_conversation(self, target, include_ancestors=False): + captured.setdefault("history_calls", []).append((target, include_ancestors)) + return [ + {"role": "user", "content": "root prompt"}, + {"role": "assistant", "content": "root answer"}, + ] if include_ancestors else [{"role": "user", "content": "tip prompt"}] + + monkeypatch.setattr(server, "_get_db", lambda: FakeDB()) + monkeypatch.setattr(server, "_enable_gateway_prompts", lambda: None) + monkeypatch.setattr(server, "_set_session_context", lambda target: []) + monkeypatch.setattr(server, "_clear_session_context", lambda tokens: None) + monkeypatch.setattr(server, "_make_agent", lambda *args, **kwargs: types.SimpleNamespace(model="test")) + monkeypatch.setattr(server, "_session_info", lambda agent: {"model": "test", "tools": {}, "skills": {}}) + monkeypatch.setattr(server, "_init_session", lambda sid, key, agent, history, cols=80: None) + + resp = server.handle_request({"id": "1", "method": "session.resume", "params": {"session_id": "tip"}}) + + assert resp["result"]["messages"] == [ + {"role": "user", "text": "root prompt"}, + {"role": "assistant", "text": "root answer"}, + ] + assert 
captured["history_calls"] == [("tip", False), ("tip", True)] + + def test_status_callback_emits_kind_and_text(): with patch("tui_gateway.server._emit") as emit: cb = server._agent_cbs("sid")["status_callback"] diff --git a/tui_gateway/server.py b/tui_gateway/server.py index 397f4f17..48651e08 100644 --- a/tui_gateway/server.py +++ b/tui_gateway/server.py @@ -913,8 +913,16 @@ def _probe_config_health(cfg: dict) -> str: def _session_info(agent) -> dict: + reasoning_config = getattr(agent, "reasoning_config", None) + reasoning_effort = "" + if isinstance(reasoning_config, dict) and reasoning_config.get("enabled") is not False: + reasoning_effort = str(reasoning_config.get("effort", "") or "") + service_tier = getattr(agent, "service_tier", None) or "" info: dict = { "model": getattr(agent, "model", ""), + "reasoning_effort": reasoning_effort, + "service_tier": service_tier, + "fast": service_tier == "priority", "tools": {}, "skills": {}, "cwd": os.getcwd(), @@ -1013,7 +1021,7 @@ def _tool_summary(name: str, result: str, duration_s: float | None) -> str | Non if n is not None: text = f"Extracted {n} {'page' if n == 1 else 'pages'}" - return f"{text or 'Completed'}{suffix}" if (text or dur) else None + return f"{text}{suffix}" if text else None def _on_tool_start(sid: str, tool_call_id: str, name: str, args: dict): @@ -1029,10 +1037,13 @@ def _on_tool_start(sid: str, tool_call_id: str, name: str, args: dict): pass session.setdefault("tool_started_at", {})[tool_call_id] = time.time() if _tool_progress_enabled(sid): + payload = {"tool_id": tool_call_id, "name": name, "context": _tool_ctx(name, args)} + if name == "todo" and isinstance(args, dict) and isinstance(args.get("todos"), list): + payload["todos"] = args.get("todos") _emit( "tool.start", sid, - {"tool_id": tool_call_id, "name": name, "context": _tool_ctx(name, args)}, + payload, ) @@ -1050,6 +1061,13 @@ def _on_tool_complete(sid: str, tool_call_id: str, name: str, args: dict, result summary = _tool_summary(name, 
result, duration_s) if summary: payload["summary"] = summary + if name == "todo": + try: + data = json.loads(result) + if isinstance(data, dict) and isinstance(data.get("todos"), list): + payload["todos"] = data.get("todos") + except Exception: + pass try: from agent.display import render_edit_diff_with_delta @@ -1698,7 +1716,8 @@ def _(rid, params: dict) -> dict: try: db.reopen_session(target) history = db.get_messages_as_conversation(target) - messages = _history_to_messages(history) + display_history = db.get_messages_as_conversation(target, include_ancestors=True) + messages = _history_to_messages(display_history) tokens = _set_session_context(target) try: agent = _make_agent(sid, target, session_id=target) @@ -1746,11 +1765,20 @@ def _(rid, params: dict) -> dict: @method("session.history") def _(rid, params: dict) -> dict: session, err = _sess(params, rid) - return err or _ok( + if err: + return err + history = list(session.get("history", [])) + db = _get_db() + if db is not None and session.get("session_key"): + try: + history = db.get_messages_as_conversation(session["session_key"], include_ancestors=True) + except Exception: + pass + return _ok( rid, { "count": len(session.get("history", [])), - "messages": _history_to_messages(list(session.get("history", []))), + "messages": _history_to_messages(history), }, ) diff --git a/ui-tui/src/__tests__/messages.test.ts b/ui-tui/src/__tests__/messages.test.ts index 8f6a265f..1da4bfd4 100644 --- a/ui-tui/src/__tests__/messages.test.ts +++ b/ui-tui/src/__tests__/messages.test.ts @@ -1,7 +1,26 @@ import { describe, expect, it } from 'vitest' +import { toTranscriptMessages } from '../domain/messages.js' import { upsert } from '../lib/messages.js' +describe('toTranscriptMessages', () => { + it('preserves assistant tool-call rows so resume does not drop prior turns', () => { + const rows = [ + { role: 'user', text: 'first prompt' }, + { role: 'tool', context: 'repo', name: 'search_files', text: 'ignored raw result' }, + { 
role: 'assistant', text: 'first answer' }, + { role: 'user', text: 'second prompt' } + ] + + expect(toTranscriptMessages(rows).map(msg => [msg.role, msg.text])).toEqual([ + ['user', 'first prompt'], + ['assistant', 'first answer'], + ['user', 'second prompt'] + ]) + expect(toTranscriptMessages(rows)[1]?.tools?.[0]).toContain('Search Files') + }) +}) + describe('upsert', () => { it('appends when last role differs', () => { expect(upsert([{ role: 'user', text: 'hi' }], 'assistant', 'hello')).toHaveLength(2) diff --git a/ui-tui/src/__tests__/text.test.ts b/ui-tui/src/__tests__/text.test.ts index d4a2469e..1690996d 100644 --- a/ui-tui/src/__tests__/text.test.ts +++ b/ui-tui/src/__tests__/text.test.ts @@ -1,14 +1,18 @@ import { describe, expect, it } from 'vitest' import { + boundedLiveRenderText, + buildToolTrailLine, edgePreview, estimateRows, estimateTokensRough, fmtK, isToolTrailResultLine, lastCotTrailIndex, + parseToolTrailResultLine, pasteTokenLabel, - sameToolTrailGroup + sameToolTrailGroup, + splitToolDuration } from '../lib/text.js' describe('isToolTrailResultLine', () => { @@ -19,6 +23,16 @@ describe('isToolTrailResultLine', () => { }) }) +describe('buildToolTrailLine', () => { + it('puts completion duration inline before the result marker', () => { + const line = buildToolTrailLine('read_file', 'x', false, '', 0.94) + + expect(line).toBe('Read File("x") (0.9s) ✓') + expect(parseToolTrailResultLine(line)).toEqual({ call: 'Read File("x") (0.9s)', detail: '', mark: '✓' }) + expect(splitToolDuration('Read File("x") (0.9s)')).toEqual({ label: 'Read File("x")', duration: ' (0.9s)' }) + }) +}) + describe('lastCotTrailIndex', () => { it('finds last non-result line', () => { expect(lastCotTrailIndex(['a ✓', 'thinking…'])).toBe(1) @@ -68,6 +82,28 @@ describe('estimateTokensRough', () => { }) }) +describe('boundedLiveRenderText', () => { + it('preserves short live text verbatim', () => { + expect(boundedLiveRenderText('one\ntwo', { maxChars: 100, maxLines: 10 
})).toBe('one\ntwo') + }) + + it('keeps the live tail by character budget', () => { + const out = boundedLiveRenderText('abcdefghij', { maxChars: 4, maxLines: 10 }) + + expect(out).toContain('ghij') + expect(out).toContain('omitted') + expect(out).not.toContain('abcdef') + }) + + it('keeps the live tail by line budget', () => { + const out = boundedLiveRenderText(['a', 'b', 'c', 'd'].join('\n'), { maxChars: 100, maxLines: 2 }) + + expect(out).toContain('c\nd') + expect(out).toContain('omitted 2 lines') + expect(out).not.toContain('a\nb') + }) +}) + describe('edgePreview', () => { it('keeps both ends for long text', () => { expect(edgePreview('Vampire Bondage ropes slipped from her neck, still stained with blood', 8, 18)).toBe( diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index e676fbd3..8d9d2e13 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -2,18 +2,20 @@ import { REASONING_PULSE_MS, STREAM_BATCH_MS, STREAM_IDLE_BATCH_MS, + STREAM_SCROLL_BATCH_MS, STREAM_TYPING_BATCH_MS } from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' import { + boundedLiveRenderText, buildToolTrailLine, estimateTokensRough, isTransientTrailLine, sameToolTrailGroup, toolTrailLabel } from '../lib/text.js' -import type { ActiveTool, ActivityItem, Msg, SubagentProgress } from '../types.js' +import type { ActiveTool, ActivityItem, Msg, SubagentProgress, TodoItem } from '../types.js' import { resetFlowOverlays } from './overlayStore.js' import { pushSnapshot } from './spawnHistoryStore.js' @@ -40,7 +42,52 @@ const diffSegmentBody = (msg: Msg): null | string => { const hasDetails = (msg: Msg): boolean => Boolean(msg.thinking || msg.tools?.length || msg.toolTokens) -const textSegments = (segments: Msg[]) => segments.filter(msg => msg.role === 'assistant' && msg.kind !== 'diff').map(msg => 
msg.text) +const isToolOnly = (msg: Msg | undefined) => + Boolean(msg && msg.kind === 'trail' && !msg.thinking?.trim() && !msg.text && msg.tools?.length) + +const mergeSequentialToolOnly = (segments: Msg[]) => + segments.reduce((acc, msg) => { + if (isToolOnly(msg) && isToolOnly(acc.at(-1))) { + const prev = acc.at(-1)! + + return [...acc.slice(0, -1), { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? [])] }] + } + + return [...acc, msg] + }, []) + +const isTodoStatus = (status: unknown): status is TodoItem['status'] => + status === 'pending' || status === 'in_progress' || status === 'completed' || status === 'cancelled' + +const parseTodos = (value: unknown): null | TodoItem[] => { + if (!Array.isArray(value)) { + return null + } + + return value + .map(item => { + if (!item || typeof item !== 'object') { + return null + } + + const row = item as Record + const status = row.status + + if (!isTodoStatus(status)) { + return null + } + + return { + content: String(row.content ?? '').trim(), + id: String(row.id ?? 
'').trim(), + status + } + }) + .filter((item): item is TodoItem => Boolean(item?.id && item.content)) +} + +const textSegments = (segments: Msg[]) => + segments.filter(msg => msg.role === 'assistant' && msg.kind !== 'diff').map(msg => msg.text) const finalTail = (finalText: string, segments: Msg[]) => { let tail = finalText @@ -88,6 +135,7 @@ class TurnController { turnTools: string[] = [] private activeTools: ActiveTool[] = [] + private activeReasoningText = '' private reasoningSegmentIndex: null | number = null private activityId = 0 private reasoningStreamingTimer: Timer = null @@ -100,12 +148,18 @@ class TurnController { this.streamDelay = STREAM_TYPING_BATCH_MS } + boostStreamingForScroll() { + this.streamDelay = Math.max(this.streamDelay, STREAM_SCROLL_BATCH_MS) + } + relaxStreaming() { this.streamDelay = STREAM_IDLE_BATCH_MS } clearReasoning() { this.reasoningTimer = clear(this.reasoningTimer) + this.activeReasoningText = '' + this.reasoningSegmentIndex = null this.reasoningText = '' this.toolTokenAcc = 0 patchTurnState({ reasoning: '', reasoningTokens: 0, toolTokens: 0 }) @@ -144,6 +198,8 @@ class TurnController { this.interrupted = true gw.request('session.interrupt', { session_id: sid }).catch(() => {}) + this.closeReasoningSegment() + const segments = this.segmentMessages const partial = this.bufRef.trimStart() const tools = this.pendingSegmentTools @@ -193,7 +249,7 @@ class TurnController { } private syncReasoningSegment() { - const thinking = this.reasoningText.trim() + const thinking = this.activeReasoningText.trim() if (!thinking) { return @@ -205,8 +261,7 @@ class TurnController { text: '', thinking, thinkingTokens: estimateTokensRough(thinking), - toolTokens: this.toolTokenAcc || undefined, - ...(this.pendingSegmentTools.length && { tools: this.pendingSegmentTools }) + toolTokens: this.toolTokenAcc || undefined } if (this.reasoningSegmentIndex === null) { @@ -219,13 +274,40 @@ class TurnController { patchTurnState({ streamSegments: 
this.segmentMessages }) } + private closeReasoningSegment() { + this.syncReasoningSegment() + this.activeReasoningText = '' + this.reasoningSegmentIndex = null + } + + private pushSegment(msg: Msg) { + if (isToolOnly(msg) && isToolOnly(this.segmentMessages.at(-1)!)) { + const prev = this.segmentMessages.at(-1)! + this.segmentMessages = [ + ...this.segmentMessages.slice(0, -1), + { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? [])] } + ] + + return + } + + this.segmentMessages = [...this.segmentMessages, msg] + } + flushStreamingSegment() { const raw = this.bufRef.trimStart() - const split = raw ? (hasReasoningTag(raw) ? splitReasoning(raw) : { reasoning: '', text: raw }) : { reasoning: '', text: '' } + + const split = raw + ? hasReasoningTag(raw) + ? splitReasoning(raw) + : { reasoning: '', text: raw } + : { reasoning: '', text: '' } if (split.reasoning && !this.reasoningText.trim()) { this.reasoningText = split.reasoning + this.activeReasoningText = split.reasoning patchTurnState({ reasoning: this.reasoningText, reasoningTokens: estimateTokensRough(this.reasoningText) }) + this.syncReasoningSegment() } const msg: Msg = { @@ -238,7 +320,7 @@ class TurnController { this.streamTimer = clear(this.streamTimer) if (split.text || hasDetails(msg)) { - this.segmentMessages = [...this.segmentMessages, msg] + this.pushSegment(msg) } this.pendingSegmentTools = [] @@ -256,6 +338,31 @@ class TurnController { }, REASONING_PULSE_MS) } + recordTodos(value: unknown) { + const todos = parseTodos(value) + + if (todos !== null) { + patchTurnState({ todos }) + } + } + + private flushPendingToolsIntoLastSegment() { + const last = this.segmentMessages[this.segmentMessages.length - 1] + + if (!this.pendingSegmentTools.length || !isToolOnly(last)) { + return false + } + + this.segmentMessages = [ + ...this.segmentMessages.slice(0, -1), + { ...last, tools: [...(last.tools ?? 
[]), ...this.pendingSegmentTools] } + ] + this.pendingSegmentTools = [] + patchTurnState({ streamPendingTools: [], streamSegments: this.segmentMessages }) + + return true + } + pushInlineDiffSegment(diffText: string, tools: string[] = []) { // Strip CLI chrome the gateway emits before the unified diff (e.g. a // leading "┊ review diff" header written by `_emit_inline_diff` for the @@ -283,7 +390,10 @@ class TurnController { return } - this.segmentMessages = [...this.segmentMessages, { kind: 'diff', role: 'assistant', text: block, ...(tools.length && { tools }) }] + this.segmentMessages = [ + ...this.segmentMessages, + { kind: 'diff', role: 'assistant', text: block, ...(tools.length && { tools }) } + ] patchTurnState({ streamSegments: this.segmentMessages }) } @@ -328,13 +438,25 @@ class TurnController { } recordMessageComplete(payload: { rendered?: string; reasoning?: string; text?: string }) { + this.closeReasoningSegment() + const rawText = (payload.rendered ?? payload.text ?? this.bufRef).trimStart() const split = splitReasoning(rawText) const finalText = finalTail(split.text, this.segmentMessages) const existingReasoning = this.reasoningText.trim() || String(payload.reasoning ?? '').trim() const savedReasoning = [existingReasoning, existingReasoning ? '' : split.reasoning].filter(Boolean).join('\n\n') const savedToolTokens = this.toolTokenAcc - const tools = this.pendingSegmentTools + let tools = this.pendingSegmentTools + const last = this.segmentMessages[this.segmentMessages.length - 1] + + if (tools.length && isToolOnly(last)) { + this.segmentMessages = [ + ...this.segmentMessages.slice(0, -1), + { ...last, tools: [...(last.tools ?? []), ...tools] } + ] + this.pendingSegmentTools = [] + tools = [] + } // Drop diff-only segments the agent is about to narrate in the final // reply. Without this, a closing "here's the diff …" message would @@ -343,13 +465,19 @@ class TurnController { // assistant narration stays put. 
const finalHasOwnDiffFence = /```(?:diff|patch)\b/i.test(finalText) - const segments = this.segmentMessages.filter(msg => { - const body = diffSegmentBody(msg) + const segments = mergeSequentialToolOnly( + this.segmentMessages.filter(msg => { + const body = diffSegmentBody(msg) - return body === null || (!finalHasOwnDiffFence && !finalText.includes(body)) - }) + return body === null || (!finalHasOwnDiffFence && !finalText.includes(body)) + }) + ) + + const hasReasoningSegment = + this.reasoningSegmentIndex !== null || segments.some(msg => Boolean(msg.thinking?.trim())) + + const finalThinking = hasReasoningSegment ? '' : savedReasoning.trim() - const finalThinking = savedReasoning.trim() const finalDetails: Msg = { kind: 'trail', role: 'system', @@ -359,8 +487,8 @@ class TurnController { toolTokens: savedToolTokens || undefined, ...(tools.length && { tools }) } - const hasReasoningSegment = this.reasoningSegmentIndex !== null - const finalMessages = hasDetails(finalDetails) && !hasReasoningSegment ? [...segments, finalDetails] : [...segments] + + const finalMessages = hasDetails(finalDetails) ? 
[...segments, finalDetails] : [...segments] if (finalText) { finalMessages.push({ role: 'assistant', text: finalText }) @@ -387,6 +515,7 @@ class TurnController { this.turnTools = [] this.persistedToolLabels.clear() this.bufRef = '' + this.interrupted = false patchTurnState({ activity: [], outcome: '' }) return { finalMessages, finalText, wasInterrupted } @@ -419,6 +548,7 @@ class TurnController { } this.reasoningText = incoming + this.activeReasoningText = incoming this.scheduleReasoning() this.syncReasoningSegment() this.pulseReasoningStreaming() @@ -429,30 +559,63 @@ class TurnController { return } + if (!this.activeReasoningText.trim() && this.pendingSegmentTools.length) { + this.flushStreamingSegment() + } + this.reasoningText += text + this.activeReasoningText += text + + if (this.reasoningText.length > 80_000) { + this.reasoningText = this.reasoningText.slice(-60_000) + } + this.scheduleReasoning() this.syncReasoningSegment() this.pulseReasoningStreaming() } - recordToolComplete(toolId: string, fallbackName?: string, error?: string, summary?: string) { - const line = this.completeTool(toolId, fallbackName, error, summary) + recordToolComplete( + toolId: string, + fallbackName?: string, + error?: string, + summary?: string, + duration?: number, + todos?: unknown + ) { + this.recordTodos(todos) + const line = this.completeTool(toolId, fallbackName, error, summary, duration) this.pendingSegmentTools = [...this.pendingSegmentTools, line] + this.flushPendingToolsIntoLastSegment() this.publishToolState() } - recordInlineDiffToolComplete(diffText: string, toolId: string, fallbackName?: string, error?: string) { + recordInlineDiffToolComplete( + diffText: string, + toolId: string, + fallbackName?: string, + error?: string, + duration?: number + ) { this.flushStreamingSegment() - this.pushInlineDiffSegment(diffText, [this.completeTool(toolId, fallbackName, error, '')]) + this.pushInlineDiffSegment(diffText, [this.completeTool(toolId, fallbackName, error, '', 
duration)]) this.publishToolState() } - private completeTool(toolId: string, fallbackName?: string, error?: string, summary?: string) { + private completeTool(toolId: string, fallbackName?: string, error?: string, summary?: string, duration?: number) { const done = this.activeTools.find(tool => tool.id === toolId) const name = done?.name ?? fallbackName ?? 'tool' const label = toolTrailLabel(name) - const line = buildToolTrailLine(name, done?.context || '', Boolean(error), error || summary || '') + const fallbackDuration = done?.startedAt ? (Date.now() - done.startedAt) / 1000 : undefined + + const line = buildToolTrailLine( + name, + done?.context || '', + Boolean(error), + error || summary || '', + duration ?? fallbackDuration + ) this.activeTools = this.activeTools.filter(tool => tool.id !== toolId) @@ -496,6 +659,7 @@ class TurnController { recordToolStart(toolId: string, name: string, context: string) { this.flushStreamingSegment() + this.closeReasoningSegment() this.pruneTransient() this.endReasoningPhase() @@ -514,6 +678,7 @@ class TurnController { this.bufRef = '' this.interrupted = false this.lastStatusNote = '' + this.activeReasoningText = '' this.pendingSegmentTools = [] this.protocolWarned = false this.reasoningSegmentIndex = null @@ -552,7 +717,7 @@ class TurnController { this.streamTimer = null const raw = this.bufRef.trimStart() const visible = hasReasoningTag(raw) ? 
splitReasoning(raw).text : raw - patchTurnState({ streaming: visible }) + patchTurnState({ streaming: boundedLiveRenderText(visible) }) }, this.streamDelay) } @@ -560,6 +725,8 @@ class TurnController { this.endReasoningPhase() this.clearReasoning() this.activeTools = [] + this.activeReasoningText = '' + this.reasoningSegmentIndex = null this.turnTools = [] this.toolTokenAcc = 0 this.persistedToolLabels.clear() diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index bb6f811a..e827dd5f 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -5,9 +5,9 @@ import { LONG_MSG } from '../config/limits.js' import { sectionMode } from '../domain/details.js' import { userDisplay } from '../domain/messages.js' import { ROLE } from '../domain/roles.js' -import { compactPreview, hasAnsi, isPasteBackedText, stripAnsi } from '../lib/text.js' +import { boundedLiveRenderText, compactPreview, hasAnsi, isPasteBackedText, stripAnsi } from '../lib/text.js' import type { Theme } from '../theme.js' -import type { DetailsMode, Msg, SectionVisibility } from '../types.js' +import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' import { Md } from './markdown.js' import { ToolTrail } from './thinking.js' @@ -20,7 +20,8 @@ export const MessageLine = memo(function MessageLine({ isStreaming = false, msg, sections, - t + t, + tools = [] }: MessageLineProps) { // Per-section overrides win over the global mode, so resolve each section // we might consume here once and gate visibility on the *content-bearing* @@ -34,7 +35,7 @@ export const MessageLine = memo(function MessageLine({ const activityMode = sectionMode('activity', detailsMode, sections, detailsModeCommandOverride) const thinking = msg.thinking?.trim() ?? 
'' - if (msg.kind === 'trail' && (msg.tools?.length || thinking)) { + if (msg.kind === 'trail' && (msg.tools?.length || tools.length || thinking)) { return thinkingMode !== 'hidden' || toolsMode !== 'hidden' || activityMode !== 'hidden' ? ( @@ -86,7 +88,11 @@ export const MessageLine = memo(function MessageLine({ } if (msg.role === 'assistant') { - return isStreaming ? {msg.text} : + return isStreaming ? ( + {boundedLiveRenderText(msg.text)} + ) : ( + + ) } if (msg.role === 'user' && msg.text.length > LONG_MSG && isPasteBackedText(msg.text)) { @@ -154,4 +160,5 @@ interface MessageLineProps { msg: Msg sections?: SectionVisibility t: Theme + tools?: ActiveTool[] } diff --git a/ui-tui/src/components/thinking.tsx b/ui-tui/src/components/thinking.tsx index b8436fc4..0fd47315 100644 --- a/ui-tui/src/components/thinking.tsx +++ b/ui-tui/src/components/thinking.tsx @@ -16,12 +16,14 @@ import { widthByDepth } from '../lib/subagentTree.js' import { + boundedLiveRenderText, compactPreview, estimateTokensRough, fmtK, formatToolCall, parseToolTrailResultLine, pick, + splitToolDuration, thinkingPreview, toolTrailLabel } from '../lib/text.js' @@ -633,7 +635,12 @@ export const Thinking = memo(function Thinking({ streaming?: boolean t: Theme }) { - const preview = useMemo(() => thinkingPreview(reasoning, mode, THINKING_COT_MAX), [mode, reasoning]) + const preview = useMemo(() => { + const raw = thinkingPreview(reasoning, mode, THINKING_COT_MAX) + + return mode === 'full' ? boundedLiveRenderText(raw) : raw + }, [mode, reasoning]) + const lines = useMemo(() => preview.split('\n').map(line => line.replace(/\t/g, ' ')), [preview]) if (!preview && !active) { @@ -790,7 +797,7 @@ export const ToolTrail = memo(function ToolTrail({ if (parsed) { groups.push({ color: parsed.mark === '✗' ? t.color.error : t.color.cornsilk, - content: parsed.detail ? 
parsed.call : `${parsed.call} ${parsed.mark}`, + content: parsed.call, details: [], key: `tr-${i}`, label: parsed.call @@ -886,6 +893,21 @@ export const ToolTrail = memo(function ToolTrail({ const delegateGroups = groups.filter(g => g.label.startsWith('Delegate Task')) const inlineDelegateKey = hasSubagents && delegateGroups.length === 1 ? delegateGroups[0]!.key : null + const toolLabel = (group: Group) => { + const { duration, label } = splitToolDuration(String(group.content)) + + return duration ? ( + <> + {label} + + {duration} + + + ) : ( + group.content + ) + } + // ── Backstop: floating alerts when every panel is hidden ───────── // // Per-section overrides win over the global details_mode (they're computed @@ -1051,7 +1073,7 @@ export const ToolTrail = memo(function ToolTrail({ content={ <> - {group.content} + {toolLabel(group)} } rails={rails} diff --git a/ui-tui/src/config/limits.ts b/ui-tui/src/config/limits.ts index 875b6bac..a2e817d8 100644 --- a/ui-tui/src/config/limits.ts +++ b/ui-tui/src/config/limits.ts @@ -1,4 +1,6 @@ export const LARGE_PASTE = { chars: 8000, lines: 80 } +export const LIVE_RENDER_MAX_CHARS = 16_000 +export const LIVE_RENDER_MAX_LINES = 240 export const LONG_MSG = 300 export const MAX_HISTORY = 800 export const THINKING_COT_MAX = 160 diff --git a/ui-tui/src/lib/text.ts b/ui-tui/src/lib/text.ts index 9407c8fa..256cbc0f 100644 --- a/ui-tui/src/lib/text.ts +++ b/ui-tui/src/lib/text.ts @@ -1,4 +1,4 @@ -import { THINKING_COT_MAX } from '../config/limits.js' +import { LIVE_RENDER_MAX_CHARS, LIVE_RENDER_MAX_LINES, THINKING_COT_MAX } from '../config/limits.js' import { VERBS } from '../content/verbs.js' import type { ThinkingMode } from '../types.js' @@ -88,6 +88,61 @@ export const thinkingPreview = (reasoning: string, mode: ThinkingMode, max: numb return !raw || mode === 'collapsed' ? '' : mode === 'full' ? 
raw : compactPreview(raw.replace(WS_RE, ' '), max) } +export const boundedLiveRenderText = ( + text: string, + { maxChars = LIVE_RENDER_MAX_CHARS, maxLines = LIVE_RENDER_MAX_LINES } = {} +) => { + if (text.length <= maxChars && text.split('\n', maxLines + 1).length <= maxLines) { + return text + } + + let start = 0 + let idx = text.length + + for (let seen = 0; seen < maxLines && idx > 0; seen++) { + idx = text.lastIndexOf('\n', idx - 1) + start = idx < 0 ? 0 : idx + 1 + + if (idx < 0) { + break + } + } + + const lineStart = start + start = Math.max(lineStart, text.length - maxChars) + + if (start > lineStart) { + const nextBreak = text.indexOf('\n', start) + + if (nextBreak >= 0 && nextBreak < text.length - 1) { + start = nextBreak + 1 + } + } + + const tail = text.slice(start).trimStart() + const omittedLines = countNewlines(text, start) + const omittedChars = Math.max(0, text.length - tail.length) + + const label = + omittedLines > 0 + ? `[showing live tail; omitted ${fmtK(omittedLines)} lines / ${fmtK(omittedChars)} chars]\n` + : `[showing live tail; omitted ${fmtK(omittedChars)} chars]\n` + + return `${label}${tail}` +} + +const countNewlines = (text: string, end: number) => { + let count = 0 + + for (let i = 0; i < end; i++) { + if (text.charCodeAt(i) === 10) { + count++ + } + } + + return count +} + export const stripTrailingPasteNewlines = (text: string) => (/[^\n]/.test(text) ? text.replace(/\n+$/, '') : text) export const toolTrailLabel = (name: string) => @@ -104,10 +159,17 @@ export const formatToolCall = (name: string, context = '') => { return preview ? `${label}("${preview}")` : label } -export const buildToolTrailLine = (name: string, context: string, error?: boolean, note?: string) => { +export const buildToolTrailLine = ( + name: string, + context: string, + error?: boolean, + note?: string, + duration?: number +) => { const detail = compactPreview(note ?? '', 72) + const took = duration !== undefined ? 
` (${duration.toFixed(1)}s)` : '' - return `${formatToolCall(name, context)}${detail ? ` :: ${detail}` : ''} ${error ? ' ✗' : ' ✓'}` + return `${formatToolCall(name, context)}${took}${detail ? ` :: ${detail}` : ''} ${error ? '✗' : '✓'}` } export const isToolTrailResultLine = (line: string) => line.endsWith(' ✓') || line.endsWith(' ✗') @@ -134,6 +196,12 @@ export const parseToolTrailResultLine = (line: string) => { return { call: body, detail: '', mark } } +export const splitToolDuration = (call: string) => { + const match = call.match(/^(.*?)( \(\d+(?:\.\d)?s\))$/) + + return match ? { label: match[1]!, duration: match[2]! } : { label: call, duration: '' } +} + export const isTransientTrailLine = (line: string) => line.startsWith('drafting ') || line === 'analyzing tool output…' export const sameToolTrailGroup = (label: string, entry: string) => From a7831b63dbc493c1f506e6c09e420d79cf554c08 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:23:43 -0500 Subject: [PATCH 36/87] fix(tui): stabilize live progress rendering --- .../src/ink/components/ScrollBox.tsx | 8 ++ .../hermes-ink/src/ink/events/input-event.ts | 12 +- ui-tui/packages/hermes-ink/src/ink/ink.tsx | 20 ++- .../packages/hermes-ink/src/ink/termio/osc.ts | 14 ++- ui-tui/scripts/profile-tui.mjs | 112 +++++++++++++++++ .../createGatewayEventHandler.test.ts | 80 +++++++++++- ui-tui/src/__tests__/scroll.test.ts | 2 + ui-tui/src/__tests__/turnStore.test.ts | 27 ++++ .../src/__tests__/virtualHistoryClamp.test.ts | 6 + ui-tui/src/app/createGatewayEventHandler.ts | 13 +- ui-tui/src/app/interfaces.ts | 17 --- ui-tui/src/app/slash/commands/core.ts | 4 +- ui-tui/src/app/turnStore.ts | 14 ++- ui-tui/src/app/useInputHandlers.ts | 30 ++++- ui-tui/src/app/useMainApp.ts | 53 +++++--- ui-tui/src/app/useSubmission.ts | 3 + ui-tui/src/components/appChrome.tsx | 27 +++- ui-tui/src/components/appLayout.tsx | 85 ++----------- ui-tui/src/components/streamingAssistant.tsx | 119 ++++++++++++++++++ 
ui-tui/src/components/textInput.tsx | 3 +- ui-tui/src/components/todoPanel.tsx | 46 +++++++ ui-tui/src/config/timing.ts | 1 + ui-tui/src/gatewayTypes.ts | 16 ++- ui-tui/src/hooks/useVirtualHistory.ts | 35 ++++-- ui-tui/src/lib/todo.test.ts | 12 ++ ui-tui/src/lib/todo.ts | 4 + ui-tui/src/types.ts | 9 ++ ui-tui/src/types/hermes-ink.d.ts | 1 + 28 files changed, 619 insertions(+), 154 deletions(-) create mode 100644 ui-tui/scripts/profile-tui.mjs create mode 100644 ui-tui/src/__tests__/turnStore.test.ts create mode 100644 ui-tui/src/components/streamingAssistant.tsx create mode 100644 ui-tui/src/components/todoPanel.tsx create mode 100644 ui-tui/src/lib/todo.test.ts create mode 100644 ui-tui/src/lib/todo.ts diff --git a/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx b/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx index 38f04b4f..c475773c 100644 --- a/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/components/ScrollBox.tsx @@ -38,6 +38,7 @@ export type ScrollBoxHandle = { * padding). Used for drag-to-scroll edge detection. */ getViewportTop: () => number + getLastManualScrollAt: () => number /** * True when scroll is pinned to the bottom. Set by scrollToBottom, the * initial stickyScroll attribute, and by the renderer when positional @@ -94,6 +95,7 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< // forces a React render: sticky is attribute-observed, no DOM-only path. const [, forceRender] = useState(0) const listenersRef = useRef(new Set<() => void>()) + const manualScrollAtRef = useRef(0) const renderQueuedRef = useRef(false) const notify = () => { @@ -130,6 +132,7 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< } el.stickyScroll = false + manualScrollAtRef.current = Date.now() el.scrollAnchor = undefined el.pendingScrollDelta = (el.pendingScrollDelta ?? 
0) + Math.floor(dy) scrollMutated(el) @@ -148,6 +151,7 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< // Explicit false overrides the DOM attribute so manual scroll // breaks stickiness. Render code checks ?? precedence. el.stickyScroll = false + manualScrollAtRef.current = Date.now() el.pendingScrollDelta = undefined el.scrollAnchor = undefined el.scrollTop = Math.max(0, Math.floor(y)) @@ -161,6 +165,7 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< } box.stickyScroll = false + manualScrollAtRef.current = Date.now() box.pendingScrollDelta = undefined box.scrollAnchor = { el, @@ -205,6 +210,9 @@ function ScrollBox({ children, ref, stickyScroll, ...style }: PropsWithChildren< getViewportTop() { return domRef.current?.scrollViewportTop ?? 0 }, + getLastManualScrollAt() { + return manualScrollAtRef.current + }, isSticky() { const el = domRef.current diff --git a/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts b/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts index a3cd3fab..6e80070e 100644 --- a/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts +++ b/ui-tui/packages/hermes-ink/src/ink/events/input-event.ts @@ -120,11 +120,7 @@ function parseKey(keypress: ParsedKey): [Key, string] { // through key.return/key.escape, and processedAsSpecialSequence bypasses // the nonAlphanumericKeys clear below, so clear them explicitly here. input = - keypress.name === 'space' - ? ' ' - : keypress.name === 'return' || keypress.name === 'escape' - ? '' - : keypress.name + keypress.name === 'space' ? ' ' : keypress.name === 'return' || keypress.name === 'escape' ? '' : keypress.name } processedAsSpecialSequence = true @@ -143,11 +139,7 @@ function parseKey(keypress: ParsedKey): [Key, string] { input = '' } else { input = - keypress.name === 'space' - ? ' ' - : keypress.name === 'return' || keypress.name === 'escape' - ? '' - : keypress.name + keypress.name === 'space' ? 
' ' : keypress.name === 'return' || keypress.name === 'escape' ? '' : keypress.name } processedAsSpecialSequence = true diff --git a/ui-tui/packages/hermes-ink/src/ink/ink.tsx b/ui-tui/packages/hermes-ink/src/ink/ink.tsx index 9db39804..71e3066a 100644 --- a/ui-tui/packages/hermes-ink/src/ink/ink.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/ink.tsx @@ -1328,7 +1328,9 @@ export default class Ink { } if (process.env.HERMES_TUI_DEBUG_CLIPBOARD) { - console.error('[clipboard] no path reached the clipboard (headless + no tmux?) — set HERMES_TUI_FORCE_OSC52=1 to force the escape sequence') + console.error( + '[clipboard] no path reached the clipboard (headless + no tmux?) — set HERMES_TUI_FORCE_OSC52=1 to force the escape sequence' + ) } } catch (err) { if (process.env.HERMES_TUI_DEBUG_CLIPBOARD) { @@ -1799,6 +1801,7 @@ export default class Ink { if (this.selectionDragCell?.col === col && this.selectionDragCell.row === row) { this.updateSelectionAutoScroll(row) + return } @@ -1822,6 +1825,7 @@ export default class Ink { private updateSelectionAutoScroll(row: number): void { if (!this.selection.isDragging || !this.altScreenActive) { this.stopSelectionAutoScroll() + return } @@ -1829,6 +1833,7 @@ export default class Ink { if (dir === 0) { this.stopSelectionAutoScroll() + return } @@ -1844,6 +1849,7 @@ export default class Ink { private stepSelectionAutoScroll(): void { if (!this.selection.isDragging || !this.altScreenActive || this.selectionAutoScrollDir === 0) { this.stopSelectionAutoScroll() + return } @@ -1851,6 +1857,7 @@ export default class Ink { if (!box) { this.stopSelectionAutoScroll() + return } @@ -1889,7 +1896,10 @@ export default class Ink { } } - this.applySelectionDrag(this.selectionDragCell?.col ?? 0, this.selectionDragCell?.row ?? (this.selectionAutoScrollDir > 0 ? bottom : top)) + this.applySelectionDrag( + this.selectionDragCell?.col ?? 0, + this.selectionDragCell?.row ?? (this.selectionAutoScrollDir > 0 ? 
bottom : top) + ) } private stopSelectionAutoScroll(): void { @@ -1908,7 +1918,11 @@ export default class Ink { while (stack.length) { const node = stack.shift()! - if (node.style.overflowY === 'scroll' && node.scrollHeight !== undefined && node.scrollViewportHeight !== undefined) { + if ( + node.style.overflowY === 'scroll' && + node.scrollHeight !== undefined && + node.scrollViewportHeight !== undefined + ) { return node } diff --git a/ui-tui/packages/hermes-ink/src/ink/termio/osc.ts b/ui-tui/packages/hermes-ink/src/ink/termio/osc.ts index c60196b8..fb683794 100644 --- a/ui-tui/packages/hermes-ink/src/ink/termio/osc.ts +++ b/ui-tui/packages/hermes-ink/src/ink/termio/osc.ts @@ -87,7 +87,8 @@ export function shouldEmitClipboardSequence(env: NodeJS.ProcessEnv = process.env const override = ( env.HERMES_TUI_FORCE_OSC52 ?? env.HERMES_TUI_CLIPBOARD_OSC52 ?? - env.HERMES_TUI_COPY_OSC52 ?? '' + env.HERMES_TUI_COPY_OSC52 ?? + '' ).trim() if (ENV_ON_RE.test(override)) { @@ -196,16 +197,19 @@ export async function setClipboard(text: string): Promise { // forever but SSH_CONNECTION is in tmux's default update-environment and // clears on local attach. Fire-and-forget, but `copyNativeAttempted` // tells us whether ANY native path will be tried on this platform. - const nativeAttempted = - !process.env['SSH_CONNECTION'] && copyNative(text) + const nativeAttempted = !process.env['SSH_CONNECTION'] && copyNative(text) const tmuxBufferLoaded = await tmuxLoadBuffer(text) // Inner OSC uses BEL directly (not osc()) — ST's ESC would need doubling // too, and BEL works everywhere for OSC 52. const sequence = tmuxBufferLoaded - ? (emitSequence ? tmuxPassthrough(`${ESC}]52;c;${b64}${BEL}`) : '') - : (emitSequence ? raw : '') + ? emitSequence + ? tmuxPassthrough(`${ESC}]52;c;${b64}${BEL}`) + : '' + : emitSequence + ? raw + : '' // Success if any path was taken. 
Native and tmux are fire-and-forget, // so we can't truly confirm the clipboard was written — but if native diff --git a/ui-tui/scripts/profile-tui.mjs b/ui-tui/scripts/profile-tui.mjs new file mode 100644 index 00000000..7093ef9f --- /dev/null +++ b/ui-tui/scripts/profile-tui.mjs @@ -0,0 +1,112 @@ +#!/usr/bin/env node +import inspector from 'node:inspector' +import { performance } from 'node:perf_hooks' + +import React from 'react' +import { render } from '@hermes/ink' +import { AppLayout } from '../src/components/appLayout.tsx' +import { resetOverlayState } from '../src/app/overlayStore.ts' +import { resetTurnState } from '../src/app/turnStore.ts' +import { resetUiState } from '../src/app/uiStore.ts' + +const session = new inspector.Session() +session.connect() +const post = (method, params = {}) => new Promise((resolve, reject) => { + session.post(method, params, (err, result) => err ? reject(err) : resolve(result)) +}) + +class Sink { + columns = Number(process.env.COLS || 120) + rows = Number(process.env.ROWS || 42) + isTTY = true + bytes = 0 + writes = 0 + listeners = new Map() + write(chunk) { + const s = String(chunk ?? '') + this.bytes += Buffer.byteLength(s) + this.writes++ + return true + } + on(event, fn) { this.listeners.set(event, fn); return this } + off(event) { this.listeners.delete(event); return this } + once(event, fn) { this.listeners.set(event, fn); return this } + removeListener(event) { this.listeners.delete(event); return this } +} + +const theme = { + brand: { prompt: '›' }, + color: { + amber: '#d19a66', bronze: '#8b6f47', dim: '#6b7280', error: '#ff5555', gold: '#ffd166', label: '#61afef', + ok: '#98c379', warn: '#e5c07b', cornsilk: '#fff8dc', prompt: '#c678dd', shellDollar: '#98c379', + statusCritical: '#ff5555', statusBad: '#e06c75', statusWarn: '#e5c07b', statusGood: '#98c379', + selectionBg: '#44475a' + } +} + +const noop = () => {} +const makeMsg = i => ({ role: i % 5 === 0 ? 
'user' : 'assistant', text: `message ${i}\n${'lorem ipsum '.repeat(80)}` }) +const historyItems = [{ kind: 'intro', role: 'system', text: '', info: { model: 'test', tools: {}, skills: {}, version: 'test' } }, ...Array.from({ length: Number(process.env.HISTORY || 500) }, (_, i) => makeMsg(i))] +const mkRows = items => items.map((msg, index) => ({ index, key: `m${index}`, msg })) +const scrollRef = { current: { + getScrollTop: () => 0, + getPendingDelta: () => 0, + getScrollHeight: () => Number(process.env.HISTORY || 500) * 4, + getViewportHeight: () => 30, + getViewportTop: () => 0, + isSticky: () => true, + subscribe: () => () => {}, + scrollBy: noop, + scrollTo: noop, + scrollToBottom: noop, + setClampBounds: noop, + getLastManualScrollAt: () => 0 +} } + +const baseProps = streamingText => ({ + actions: { answerApproval: noop, answerClarify: noop, answerSecret: noop, answerSudo: noop, onModelSelect: noop, resumeById: noop, setStickyPrompt: noop }, + composer: { cols: 120, compIdx: 0, completions: [], empty: false, handleTextPaste: () => null, input: '', inputBuf: [], pagerPageSize: 10, queueEditIdx: null, queuedDisplay: [], submit: noop, updateInput: noop }, + mouseTracking: false, + progress: { + activity: [], outcome: '', reasoning: streamingText, reasoningActive: true, reasoningStreaming: true, + reasoningTokens: Math.ceil(streamingText.length / 4), showProgressArea: true, showStreamingArea: true, + streamPendingTools: [], streamSegments: [], streaming: streamingText, subagents: [], toolTokens: 0, tools: [], turnTrail: [], todos: [] + }, + status: { cwdLabel: '~/repo', goodVibesTick: 0, sessionStartedAt: Date.now(), showStickyPrompt: false, statusColor: theme.color.ok, stickyPrompt: '', turnStartedAt: Date.now(), voiceLabel: 'voice off' }, + transcript: { + historyItems, + scrollRef, + virtualHistory: { bottomSpacer: 0, end: historyItems.length, measureRef: () => noop, offsets: historyItems.map((_, i) => i * 4), start: Math.max(0, historyItems.length - 
Number(process.env.MOUNTED || 120)), topSpacer: 0 }, + virtualRows: mkRows(historyItems) + } +}) + +async function main() { + resetUiState(); resetTurnState(); resetOverlayState() + const stdout = new Sink() + const stdin = { isTTY: true, setRawMode: noop, on: noop, off: noop, resume: noop, pause: noop } + const text = Array.from({ length: Number(process.env.LINES || 1200) }, (_, i) => `stream line ${i} ${'x'.repeat(90)}`).join('\n') + const inst = render(React.createElement(AppLayout, baseProps('')), { stdout, stdin, stderr: stdout, debug: false, exitOnCtrlC: false }) + + await post('Profiler.enable') + await post('HeapProfiler.enable') + await post('Profiler.start') + const startMem = process.memoryUsage() + const t0 = performance.now() + const iterations = Number(process.env.ITERS || 40) + for (let i = 1; i <= iterations; i++) { + const prefix = text.slice(0, Math.floor(text.length * i / iterations)) + inst.rerender(React.createElement(AppLayout, baseProps(prefix))) + await new Promise(r => setImmediate(r)) + } + const elapsed = performance.now() - t0 + const prof = await post('Profiler.stop') + const endMem = process.memoryUsage() + await post('HeapProfiler.collectGarbage') + const afterGc = process.memoryUsage() + inst.unmount() + session.disconnect() + console.log(JSON.stringify({ elapsedMs: Math.round(elapsed), stdoutBytes: stdout.bytes, stdoutWrites: stdout.writes, startMem, endMem, afterGc, profileNodes: prof.profile.nodes.length }, null, 2)) +} + +main().catch(err => { console.error(err); process.exit(1) }) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 27c49b0d..ad4a8f8e 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -59,6 +59,54 @@ describe('createGatewayEventHandler', () => { patchUiState({ showReasoning: true }) }) + it('keeps todo list visible after final assistant text completes', () 
=> { + const appended: Msg[] = [] + + const todos = [ + { content: 'Gather ingredients', id: 'prep', status: 'completed' }, + { content: 'Boil water', id: 'boil', status: 'in_progress' }, + { content: 'Make sauce', id: 'sauce', status: 'pending' } + ] + + const onEvent = createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: {}, type: 'message.start' } as any) + onEvent({ payload: { name: 'todo', todos, tool_id: 'todo-1' }, type: 'tool.start' } as any) + expect(getTurnState().todos).toEqual(todos) + + onEvent({ payload: { text: 'Started a todo list.' }, type: 'message.complete' } as any) + + expect(appended[appended.length - 1]).toMatchObject({ role: 'assistant', text: 'Started a todo list.' }) + expect(getTurnState().todos).toEqual(todos) + }) + + it('keeps the current todo list visible when the next message starts', () => { + const appended: Msg[] = [] + const todos = [{ content: 'Boil water', id: 'boil', status: 'in_progress' }] + + const onEvent = createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: { name: 'todo', todos, tool_id: 'todo-1' }, type: 'tool.start' } as any) + expect(getTurnState().todos).toEqual(todos) + + onEvent({ payload: {}, type: 'message.start' } as any) + + expect(getTurnState().todos).toEqual(todos) + }) + + it('clears the visible todo list when the todo tool returns an empty list', () => { + const appended: Msg[] = [] + const todos = [{ content: 'Boil water', id: 'boil', status: 'in_progress' }] + const onEvent = createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: { name: 'todo', todos, tool_id: 'todo-1' }, type: 'tool.start' } as any) + expect(getTurnState().todos).toEqual(todos) + + onEvent({ payload: { name: 'todo', todos: [], tool_id: 'todo-1' }, type: 'tool.complete' } as any) + + expect(getTurnState().todos).toEqual([]) + }) + it('persists completed tool rows when message.complete lands immediately after tool.complete', () => { const appended: Msg[] = [] @@ -90,6 +138,31 @@ 
describe('createGatewayEventHandler', () => { expect(appended[1]).toMatchObject({ role: 'assistant', text: 'final answer' }) }) + it('groups sequential completed tools into one trail when the turn completes', () => { + const appended: Msg[] = [] + const onEvent = createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: { context: 'alpha', name: 'search_files', tool_id: 'tool-1' }, type: 'tool.start' } as any) + onEvent({ + payload: { name: 'search_files', summary: 'first done', tool_id: 'tool-1' }, + type: 'tool.complete' + } as any) + onEvent({ payload: { context: 'beta', name: 'read_file', tool_id: 'tool-2' }, type: 'tool.start' } as any) + onEvent({ payload: { name: 'read_file', summary: 'second done', tool_id: 'tool-2' }, type: 'tool.complete' } as any) + + expect(getTurnState().streamSegments.filter(msg => msg.kind === 'trail' && msg.tools?.length)).toHaveLength(1) + expect(getTurnState().streamSegments[0]?.tools).toHaveLength(2) + expect(getTurnState().streamPendingTools).toEqual([]) + + onEvent({ payload: { text: '' }, type: 'message.complete' } as any) + + const toolTrails = appended.filter(msg => msg.kind === 'trail' && msg.tools?.length) + expect(toolTrails).toHaveLength(1) + expect(toolTrails[0]?.tools).toHaveLength(2) + expect(toolTrails[0]?.tools?.[0]).toContain('Search Files') + expect(toolTrails[0]?.tools?.[1]).toContain('Read File') + }) + it('keeps tool tokens across handler recreation mid-turn', () => { const appended: Msg[] = [] @@ -213,7 +286,12 @@ describe('createGatewayEventHandler', () => { expect(appended).toHaveLength(0) expect(turnController.segmentMessages).toEqual([ { role: 'assistant', text: 'Editing the file' }, - { kind: 'diff', role: 'assistant', text: block, tools: ['Patch("foo.ts") ✓'] } + { + kind: 'diff', + role: 'assistant', + text: block, + tools: [expect.stringMatching(/^Patch\("foo\.ts"\)(?: \([^)]+\))? 
✓$/)] + } ]) onEvent({ payload: { text: 'patch applied' }, type: 'message.complete' } as any) diff --git a/ui-tui/src/__tests__/scroll.test.ts b/ui-tui/src/__tests__/scroll.test.ts index 22f5d3f1..652cca09 100644 --- a/ui-tui/src/__tests__/scroll.test.ts +++ b/ui-tui/src/__tests__/scroll.test.ts @@ -21,6 +21,7 @@ describe('scrollWithSelectionBy', () => { getScrollTop: vi.fn(() => 9), getViewportHeight: vi.fn(() => 20) }) + const selection = { captureScrolledRows: vi.fn(), getState: vi.fn(() => null), @@ -39,6 +40,7 @@ describe('scrollWithSelectionBy', () => { getScrollTop: vi.fn(() => 10), getViewportHeight: vi.fn(() => 20) }) + const selection = { captureScrolledRows: vi.fn(), getState: vi.fn(() => null), diff --git a/ui-tui/src/__tests__/turnStore.test.ts b/ui-tui/src/__tests__/turnStore.test.ts new file mode 100644 index 00000000..13cd0f64 --- /dev/null +++ b/ui-tui/src/__tests__/turnStore.test.ts @@ -0,0 +1,27 @@ +import { describe, expect, it } from 'vitest' + +import { + freezeTurnRendering, + getRenderableTurnState, + patchTurnState, + resetTurnState, + unfreezeTurnRendering +} from '../app/turnStore.js' + +describe('turn render freezing', () => { + it('holds the render snapshot stable while live turn state keeps changing', () => { + resetTurnState() + patchTurnState({ streaming: 'before scroll' }) + freezeTurnRendering() + + patchTurnState({ reasoning: 'new thinking', streaming: 'new streamed text' }) + + expect(getRenderableTurnState().streaming).toBe('before scroll') + expect(getRenderableTurnState().reasoning).toBe('') + + unfreezeTurnRendering() + + expect(getRenderableTurnState().streaming).toBe('new streamed text') + expect(getRenderableTurnState().reasoning).toBe('new thinking') + }) +}) diff --git a/ui-tui/src/__tests__/virtualHistoryClamp.test.ts b/ui-tui/src/__tests__/virtualHistoryClamp.test.ts index 255fad7c..d14f308d 100644 --- a/ui-tui/src/__tests__/virtualHistoryClamp.test.ts +++ b/ui-tui/src/__tests__/virtualHistoryClamp.test.ts @@ -10,4 
+10,10 @@ describe('virtual history clamp bounds', () => { it('sets clamp bounds after manual scroll breaks sticky mode', () => { expect(shouldSetVirtualClamp({ itemCount: 20, sticky: false, viewportHeight: 10 })).toBe(true) }) + + it('does not clamp while a live tail is growing below virtual history', () => { + expect(shouldSetVirtualClamp({ itemCount: 20, liveTailActive: true, sticky: false, viewportHeight: 10 })).toBe( + false + ) + }) }) diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index 699a8138..267bf8c1 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -372,6 +372,7 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: return case 'tool.start': + turnController.recordTodos(ev.payload.todos) turnController.recordToolStart(ev.payload.tool_id, ev.payload.name ?? 'tool', ev.payload.context ?? '') return @@ -384,10 +385,18 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: inlineDiffText, ev.payload.tool_id, ev.payload.name, - ev.payload.error + ev.payload.error, + ev.payload.duration_s ) } else { - turnController.recordToolComplete(ev.payload.tool_id, ev.payload.name, ev.payload.error, ev.payload.summary) + turnController.recordToolComplete( + ev.payload.tool_id, + ev.payload.name, + ev.payload.error, + ev.payload.summary, + ev.payload.duration_s, + ev.payload.todos + ) } return diff --git a/ui-tui/src/app/interfaces.ts b/ui-tui/src/app/interfaces.ts index f2215391..1904277c 100644 --- a/ui-tui/src/app/interfaces.ts +++ b/ui-tui/src/app/interfaces.ts @@ -7,8 +7,6 @@ import type { ImageAttachResponse } from '../gatewayTypes.js' import type { RpcResult } from '../lib/rpc.js' import type { Theme } from '../theme.js' import type { - ActiveTool, - ActivityItem, ApprovalReq, ClarifyReq, ConfirmReq, @@ -19,7 +17,6 @@ import type { SectionVisibility, SessionInfo, SlashCatalog, - 
SubagentProgress, SudoReq, Usage } from '../types.js' @@ -308,21 +305,7 @@ export interface AppLayoutComposerProps { } export interface AppLayoutProgressProps { - activity: ActivityItem[] - outcome: string - reasoning: string - reasoningActive: boolean - reasoningStreaming: boolean - reasoningTokens: number showProgressArea: boolean - showStreamingArea: boolean - streamPendingTools: string[] - streamSegments: Msg[] - streaming: string - subagents: SubagentProgress[] - toolTokens: number - tools: ActiveTool[] - turnTrail: string[] } export interface AppLayoutStatusProps { diff --git a/ui-tui/src/app/slash/commands/core.ts b/ui-tui/src/app/slash/commands/core.ts index ecc080ca..4c14fde4 100644 --- a/ui-tui/src/app/slash/commands/core.ts +++ b/ui-tui/src/app/slash/commands/core.ts @@ -260,7 +260,9 @@ export const coreCommands: SlashCommand[] = [ if (text) { return sys(`copied ${text.length} characters`) } else { - return sys('clipboard copy failed — try HERMES_TUI_FORCE_OSC52=1 to force the escape sequence; HERMES_TUI_DEBUG_CLIPBOARD=1 for details') + return sys( + 'clipboard copy failed — try HERMES_TUI_FORCE_OSC52=1 to force the escape sequence; HERMES_TUI_DEBUG_CLIPBOARD=1 for details' + ) } } diff --git a/ui-tui/src/app/turnStore.ts b/ui-tui/src/app/turnStore.ts index 148a50c1..f6d40bd3 100644 --- a/ui-tui/src/app/turnStore.ts +++ b/ui-tui/src/app/turnStore.ts @@ -1,6 +1,7 @@ import { atom } from 'nanostores' +import { useSyncExternalStore } from 'react' -import type { ActiveTool, ActivityItem, Msg, SubagentProgress } from '../types.js' +import type { ActiveTool, ActivityItem, Msg, SubagentProgress, TodoItem } from '../types.js' const buildTurnState = (): TurnState => ({ activity: [], @@ -13,6 +14,7 @@ const buildTurnState = (): TurnState => ({ streamSegments: [], streaming: '', subagents: [], + todos: [], toolTokens: 0, tools: [], turnTrail: [] @@ -22,6 +24,15 @@ export const $turnState = atom(buildTurnState()) export const getTurnState = () => $turnState.get() 
+const subscribeTurn = (cb: () => void) => $turnState.listen(() => cb()) + +export const useTurnSelector = (selector: (state: TurnState) => T): T => + useSyncExternalStore( + subscribeTurn, + () => selector($turnState.get()), + () => selector($turnState.get()) + ) + export const patchTurnState = (next: Partial | ((state: TurnState) => TurnState)) => $turnState.set(typeof next === 'function' ? next($turnState.get()) : { ...$turnState.get(), ...next }) @@ -38,6 +49,7 @@ export interface TurnState { streamSegments: Msg[] streaming: string subagents: SubagentProgress[] + todos: TodoItem[] toolTokens: number tools: ActiveTool[] turnTrail: string[] diff --git a/ui-tui/src/app/useInputHandlers.ts b/ui-tui/src/app/useInputHandlers.ts index d2b8bf27..fff73d9c 100644 --- a/ui-tui/src/app/useInputHandlers.ts +++ b/ui-tui/src/app/useInputHandlers.ts @@ -1,6 +1,8 @@ import { useInput } from '@hermes/ink' import { useStore } from '@nanostores/react' +import { useRef } from 'react' +import { TYPING_IDLE_MS } from '../config/timing.js' import type { ApprovalRespondResponse, ConfigSetResponse, @@ -26,6 +28,24 @@ export function useInputHandlers(ctx: InputHandlerContext): InputHandlerResult { const overlay = useStore($overlayState) const isBlocked = useStore($isBlocked) const pagerPageSize = Math.max(5, (terminal.stdout?.rows ?? 
24) - 6) + const scrollIdleTimer = useRef | null>(null) + + const scrollTranscript = (delta: number) => { + if (getUiState().busy) { + turnController.boostStreamingForScroll() + + if (scrollIdleTimer.current) { + clearTimeout(scrollIdleTimer.current) + } + + scrollIdleTimer.current = setTimeout(() => { + scrollIdleTimer.current = null + turnController.relaxStreaming() + }, TYPING_IDLE_MS) + } + + terminal.scrollWithSelection(delta) + } const copySelection = () => { // ink's copySelection() already calls setClipboard() which handles @@ -259,26 +279,26 @@ export function useInputHandlers(ctx: InputHandlerContext): InputHandlerResult { } if (key.wheelUp) { - return terminal.scrollWithSelection(-wheelStep) + return scrollTranscript(-wheelStep) } if (key.wheelDown) { - return terminal.scrollWithSelection(wheelStep) + return scrollTranscript(wheelStep) } if (key.shift && key.upArrow) { - return terminal.scrollWithSelection(-1) + return scrollTranscript(-1) } if (key.shift && key.downArrow) { - return terminal.scrollWithSelection(1) + return scrollTranscript(1) } if (key.pageUp || key.pageDown) { const viewport = terminal.scrollRef.current?.getViewportHeight() ?? Math.max(6, (terminal.stdout?.rows ?? 24) - 8) const step = Math.max(4, viewport - 2) - return terminal.scrollWithSelection(key.pageUp ? -step : step) + return scrollTranscript(key.pageUp ? 
-step : step) } if (key.escape && terminal.hasSelection) { diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 6e07f8f8..262b400f 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -28,7 +28,7 @@ import { type GatewayRpc, type TranscriptRow } from './interfaces.js' import { $overlayState, patchOverlayState } from './overlayStore.js' import { scrollWithSelectionBy } from './scroll.js' import { turnController } from './turnController.js' -import { $turnState, patchTurnState } from './turnStore.js' +import { $turnState, patchTurnState, useTurnSelector } from './turnStore.js' import { $uiState, getUiState, patchUiState } from './uiStore.js' import { useComposerState } from './useComposerState.js' import { useConfigSync } from './useConfigSync.js' @@ -108,6 +108,19 @@ export function useMainApp(gw: GatewayClient) { const overlay = useStore($overlayState) const turn = useStore($turnState) + const turnLiveTailActive = useTurnSelector(state => + Boolean( + state.streaming || + state.streamPendingTools.length || + state.streamSegments.length || + state.reasoning.trim() || + state.reasoningActive || + state.tools.length || + state.subagents.length || + state.todos.length + ) + ) + const slashFlightRef = useRef(0) const slashRef = useRef<(cmd: string) => boolean>(() => false) const colsRef = useRef(cols) @@ -178,7 +191,7 @@ export function useMainApp(gw: GatewayClient) { [historyItems, messageId] ) - const virtualHistory = useVirtualHistory(scrollRef, virtualRows, cols) + const virtualHistory = useVirtualHistory(scrollRef, virtualRows, cols, { liveTailActive: turnLiveTailActive }) const scrollWithSelection = useCallback( (delta: number) => scrollWithSelectionBy(delta, { scrollRef, selection }), @@ -587,7 +600,7 @@ export function useMainApp(gw: GatewayClient) { slashRef.current(`/model ${value} --global`) }, []) - const hasReasoning = Boolean(turn.reasoning.trim()) + const hasReasoning = useTurnSelector(state => 
Boolean(state.reasoning.trim())) // Per-section overrides win over the global mode — when every section is // resolved to hidden, the only thing ToolTrail will surface is the @@ -597,19 +610,22 @@ export function useMainApp(gw: GatewayClient) { s => sectionMode(s, ui.detailsMode, ui.sections, ui.detailsModeCommandOverride) !== 'hidden' ) - const showProgressArea = anyPanelVisible - ? Boolean( - ui.busy || - turn.outcome || - turn.streamPendingTools.length || - turn.streamSegments.length || - turn.subagents.length || - turn.tools.length || - turn.turnTrail.length || - hasReasoning || - turn.activity.length - ) - : turn.activity.some(item => item.tone !== 'info') + const showProgressArea = useTurnSelector(state => + anyPanelVisible + ? Boolean( + ui.busy || + state.outcome || + state.streamPendingTools.length || + state.streamSegments.length || + state.subagents.length || + state.tools.length || + state.todos.length || + state.turnTrail.length || + hasReasoning || + state.activity.length + ) + : state.activity.some(item => item.tone !== 'info') + ) const appActions = useMemo( () => ({ @@ -654,10 +670,7 @@ export function useMainApp(gw: GatewayClient) { return bottom >= scrollHeight - 3 })() - const liveProgress = useMemo( - () => ({ ...turn, showProgressArea, showStreamingArea: Boolean(turn.streaming) }), - [turn, showProgressArea] - ) + const liveProgress = useMemo(() => ({ showProgressArea }), [showProgressArea]) // Always pass current progress through. 
Freezing this while offscreen looked // like a nice scroll optimization, but it also froze the live tail's diff --git a/ui-tui/src/app/useSubmission.ts b/ui-tui/src/app/useSubmission.ts index 046b2316..6d9c7740 100644 --- a/ui-tui/src/app/useSubmission.ts +++ b/ui-tui/src/app/useSubmission.ts @@ -58,6 +58,7 @@ export function useSubmission(opts: UseSubmissionOptions) { if (!composerState.input && !composerState.inputBuf.length) { turnController.relaxStreaming() + return } @@ -92,9 +93,11 @@ export function useSubmission(opts: UseSubmissionOptions) { turnController.clearStatusTimer() maybeGoodVibes(submitText) setLastUserMsg(text) + if (showUserMessage) { appendMessage({ role: 'user', text: displayText }) } + patchUiState({ busy: true, status: 'running…' }) turnController.bufRef = '' turnController.interrupted = false diff --git a/ui-tui/src/components/appChrome.tsx b/ui-tui/src/components/appChrome.tsx index f03e0f5a..17ba966d 100644 --- a/ui-tui/src/components/appChrome.tsx +++ b/ui-tui/src/components/appChrome.tsx @@ -139,6 +139,27 @@ function SessionDuration({ startedAt }: { startedAt: number }) { return fmtDuration(now - startedAt) } +const effortLabel = (effort?: string) => { + const value = String(effort ?? '') + .trim() + .toLowerCase() + + return value && value !== 'medium' && value !== 'normal' && value !== 'default' ? value : '' +} + +const shortModelLabel = (model: string) => + model + .split('/') + .pop()! + .replace(/^claude[-_]/, '') + .replace(/^anthropic[-_]/, '') + .replace(/[-_]/g, ' ') + .replace(/\b(\d+)\s+(\d+)\b/g, '$1.$2') + .trim() + +const modelLabel = (model: string, effort?: string, fast?: boolean) => + [shortModelLabel(model), effortLabel(effort), fast ? 
'fast' : ''].filter(Boolean).join(' ') + export function GoodVibesHeart({ tick, t }: { tick: number; t: Theme }) { const [active, setActive] = useState(false) const [color, setColor] = useState(t.color.amber) @@ -171,6 +192,8 @@ export function StatusRule({ status, statusColor, model, + modelFast, + modelReasoningEffort, usage, bgCount, sessionStartedAt, @@ -201,7 +224,7 @@ export function StatusRule({ ) : ( {status} )} - │ {model} + │ {modelLabel(model, modelReasoningEffort, modelFast)} {ctxLabel ? │ {ctxLabel} : null} {bar ? ( @@ -337,6 +360,8 @@ interface StatusRuleProps { cols: number cwdLabel: string model: string + modelFast?: boolean + modelReasoningEffort?: string sessionStartedAt?: null | number showCost: boolean status: string diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 744f6e73..fe370700 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -3,13 +3,11 @@ import { useStore } from '@nanostores/react' import { memo } from 'react' import { useGateway } from '../app/gatewayContext.js' -import type { AppLayoutProgressProps, AppLayoutProps } from '../app/interfaces.js' +import type { AppLayoutProps } from '../app/interfaces.js' import { $isBlocked, $overlayState, patchOverlayState } from '../app/overlayStore.js' import { $uiState } from '../app/uiStore.js' import { PLACEHOLDER } from '../content/placeholders.js' import { inputVisualHeight, stableComposerColumns } from '../lib/inputMetrics.js' -import type { Theme } from '../theme.js' -import type { DetailsMode, SectionVisibility } from '../types.js' import { AgentsOverlay } from './agentsOverlay.js' import { GoodVibesHeart, StatusRule, StickyPromptTracker, TranscriptScrollbar } from './appChrome.js' @@ -17,69 +15,9 @@ import { FloatingOverlays, PromptZone } from './appOverlays.js' import { Banner, Panel, SessionPanel } from './branding.js' import { MessageLine } from './messageLine.js' import { QueuedMessages } from 
'./queuedMessages.js' +import { LiveTodoPanel, StreamingAssistant } from './streamingAssistant.js' import { TextInput } from './textInput.js' -const StreamingAssistant = memo(function StreamingAssistant({ - busy, - cols, - compact, - detailsMode, - detailsModeCommandOverride, - progress, - sections, - t -}: StreamingAssistantProps) { - if (!progress.showProgressArea && !progress.showStreamingArea) { - return null - } - - return ( - <> - {progress.streamSegments.map((msg, i) => ( - - ))} - - {progress.showStreamingArea && ( - - )} - - {!progress.showStreamingArea && !!progress.streamPendingTools.length && ( - - )} - - ) -}) - const TranscriptPane = memo(function TranscriptPane({ actions, composer, @@ -120,15 +58,15 @@ const TranscriptPane = memo(function TranscriptPane({ {transcript.virtualHistory.bottomSpacer > 0 ? : null} + + @@ -279,7 +217,9 @@ const StatusRulePane = memo(function StatusRulePane({ busy={ui.busy} cols={composer.cols} cwdLabel={status.cwdLabel} - model={ui.info?.model?.split('/').pop() ?? ''} + model={ui.info?.model ?? 
''} + modelFast={ui.info?.fast || ui.info?.service_tier === 'priority'} + modelReasoningEffort={ui.info?.reasoning_effort} sessionStartedAt={status.sessionStartedAt} showCost={ui.showCost} status={ui.status} @@ -331,14 +271,3 @@ export const AppLayout = memo(function AppLayout({ ) }) - -interface StreamingAssistantProps { - busy: boolean - cols: number - compact?: boolean - detailsMode: DetailsMode - detailsModeCommandOverride: boolean - progress: AppLayoutProgressProps - sections?: SectionVisibility - t: Theme -} diff --git a/ui-tui/src/components/streamingAssistant.tsx b/ui-tui/src/components/streamingAssistant.tsx new file mode 100644 index 00000000..b0279986 --- /dev/null +++ b/ui-tui/src/components/streamingAssistant.tsx @@ -0,0 +1,119 @@ +import { useStore } from '@nanostores/react' +import { memo } from 'react' + +import type { AppLayoutProgressProps } from '../app/interfaces.js' +import { useTurnSelector } from '../app/turnStore.js' +import { $uiState } from '../app/uiStore.js' +import type { DetailsMode, Msg, SectionVisibility } from '../types.js' + +import { MessageLine } from './messageLine.js' +import { TodoPanel } from './todoPanel.js' + +const isToolOnly = (msg: Msg | undefined) => + Boolean(msg && msg.kind === 'trail' && !msg.thinking?.trim() && !msg.text && msg.tools?.length) + +const groupedSegments = (segments: Msg[]) => + segments.reduce((acc, msg) => { + if (isToolOnly(msg) && isToolOnly(acc.at(-1))) { + const prev = acc.at(-1)! + + return [...acc.slice(0, -1), { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? 
[])] }] + } + + return [...acc, msg] + }, []) + +export const StreamingAssistant = memo(function StreamingAssistant({ + cols, + compact, + detailsMode, + detailsModeCommandOverride, + progress, + sections +}: StreamingAssistantProps) { + const ui = useStore($uiState) + const streamSegments = useTurnSelector(state => state.streamSegments) + const streamPendingTools = useTurnSelector(state => state.streamPendingTools) + const streaming = useTurnSelector(state => state.streaming) + const activeTools = useTurnSelector(state => state.tools) + const showStreamingArea = Boolean(streaming) + + if (!progress.showProgressArea && !showStreamingArea && !activeTools.length) { + return null + } + + return ( + <> + {groupedSegments(streamSegments).map((msg, i) => ( + + ))} + + {!!activeTools.length && ( + + )} + + {showStreamingArea && ( + + )} + + {!showStreamingArea && !!streamPendingTools.length && ( + + )} + + ) +}) + +export const LiveTodoPanel = memo(function LiveTodoPanel() { + const ui = useStore($uiState) + const todos = useTurnSelector(state => state.todos) + + return +}) + +interface StreamingAssistantProps { + cols: number + compact?: boolean + detailsMode: DetailsMode + detailsModeCommandOverride: boolean + progress: AppLayoutProgressProps + sections?: SectionVisibility +} diff --git a/ui-tui/src/components/textInput.tsx b/ui-tui/src/components/textInput.tsx index 984d2178..3b916d3d 100644 --- a/ui-tui/src/components/textInput.tsx +++ b/ui-tui/src/components/textInput.tsx @@ -508,7 +508,8 @@ export function TextInput({ curRef.current = c vRef.current = next - lineWidthRef.current = nextLineWidth ?? stringWidth(next.includes('\n') ? next.slice(next.lastIndexOf('\n') + 1) : next) + lineWidthRef.current = + nextLineWidth ?? stringWidth(next.includes('\n') ? 
next.slice(next.lastIndexOf('\n') + 1) : next) if (next !== prev) { if (syncParent) { diff --git a/ui-tui/src/components/todoPanel.tsx b/ui-tui/src/components/todoPanel.tsx new file mode 100644 index 00000000..48904faf --- /dev/null +++ b/ui-tui/src/components/todoPanel.tsx @@ -0,0 +1,46 @@ +import { Box, Text } from '@hermes/ink' +import { memo } from 'react' + +import { todoGlyph } from '../lib/todo.js' +import type { Theme } from '../theme.js' +import type { TodoItem } from '../types.js' + +export const TodoPanel = memo(function TodoPanel({ t, todos }: { t: Theme; todos: TodoItem[] }) { + if (!todos.length) { + return null + } + + return ( + + + + + Todo + {' '} + + ({todos.filter(todo => todo.status === 'completed').length}/{todos.length}) + + + + {todos.map(todo => { + const done = todo.status === 'completed' + const cancel = todo.status === 'cancelled' + const active = todo.status === 'in_progress' + + return ( + + + {todoGlyph(todo.status)}{' '} + + {todo.content} + + ) + })} + + + ) +}) diff --git a/ui-tui/src/config/timing.ts b/ui-tui/src/config/timing.ts index e0bd611b..e1811e83 100644 --- a/ui-tui/src/config/timing.ts +++ b/ui-tui/src/config/timing.ts @@ -1,5 +1,6 @@ export const STREAM_BATCH_MS = 16 export const STREAM_IDLE_BATCH_MS = 16 +export const STREAM_SCROLL_BATCH_MS = 96 export const STREAM_TYPING_BATCH_MS = 80 export const TYPING_IDLE_MS = 250 export const REASONING_PULSE_MS = 700 diff --git a/ui-tui/src/gatewayTypes.ts b/ui-tui/src/gatewayTypes.ts index ce056040..335c172d 100644 --- a/ui-tui/src/gatewayTypes.ts +++ b/ui-tui/src/gatewayTypes.ts @@ -384,9 +384,21 @@ export type GatewayEvent = | { payload?: { text?: string }; session_id?: string; type: 'reasoning.delta' | 'reasoning.available' } | { payload: { name?: string; preview?: string }; session_id?: string; type: 'tool.progress' } | { payload: { name?: string }; session_id?: string; type: 'tool.generating' } - | { payload: { context?: string; name?: string; tool_id: string }; session_id?: 
string; type: 'tool.start' } | { - payload: { error?: string; inline_diff?: string; name?: string; summary?: string; tool_id: string } + payload: { context?: string; name?: string; tool_id: string; todos?: unknown[] } + session_id?: string + type: 'tool.start' + } + | { + payload: { + duration_s?: number + error?: string + inline_diff?: string + name?: string + summary?: string + tool_id: string + todos?: unknown[] + } session_id?: string type: 'tool.complete' } diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts index 17c93a75..0d98ca5e 100644 --- a/ui-tui/src/hooks/useVirtualHistory.ts +++ b/ui-tui/src/hooks/useVirtualHistory.ts @@ -19,13 +19,15 @@ const FREEZE_RENDERS = 2 export const shouldSetVirtualClamp = ({ itemCount, + liveTailActive = false, sticky, viewportHeight }: { itemCount: number + liveTailActive?: boolean sticky: boolean viewportHeight: number -}) => itemCount > 0 && viewportHeight > 0 && !sticky +}) => itemCount > 0 && viewportHeight > 0 && !sticky && !liveTailActive const upperBound = (arr: number[], target: number) => { let lo = 0 @@ -44,7 +46,13 @@ export function useVirtualHistory( scrollRef: RefObject, items: readonly { key: string }[], columns: number, - { estimate = ESTIMATE, overscan = OVERSCAN, maxMounted = MAX_MOUNTED, coldStartCount = COLD_START } = {} + { + estimate = ESTIMATE, + liveTailActive = false, + overscan = OVERSCAN, + maxMounted = MAX_MOUNTED, + coldStartCount = COLD_START + } = {} ) { const nodes = useRef(new Map()) const heights = useRef(new Map()) @@ -92,7 +100,7 @@ export function useVirtualHistory( return NaN } - const b = Math.floor(s.getScrollTop() / QUANTUM) + const b = Math.floor((s.getScrollTop() + s.getPendingDelta()) / QUANTUM) return s.isSticky() ? -b - 1 : b }, @@ -131,8 +139,11 @@ export function useVirtualHistory( const n = items.length const total = offsets[n] ?? 0 const top = Math.max(0, scrollRef.current?.getScrollTop() ?? 
0) + const pending = scrollRef.current?.getPendingDelta() ?? 0 + const target = Math.max(0, top + pending) const vp = Math.max(0, scrollRef.current?.getViewportHeight() ?? 0) const sticky = scrollRef.current?.isSticky() ?? true + const recentManual = Date.now() - (scrollRef.current?.getLastManualScrollAt() ?? 0) < 1200 // During a freeze, drop the frozen range if items shrank past its start // (/clear, compaction) — clamping would collapse to an empty mount and @@ -149,9 +160,19 @@ export function useVirtualHistory( } else if (n > 0) { if (vp <= 0) { start = Math.max(0, n - coldStartCount) + } else if (sticky && !recentManual) { + const budget = vp + overscan + start = n + + while (start > 0 && total - offsets[start - 1]! < budget) { + start-- + } } else { - start = Math.max(0, Math.min(n - 1, upperBound(offsets, Math.max(0, top - overscan)) - 1)) - end = Math.max(start + 1, Math.min(n, upperBound(offsets, top + vp + overscan))) + const lo = Math.max(0, Math.min(top, target) - overscan) + const hi = Math.max(top, target) + vp + overscan + + start = Math.max(0, Math.min(n - 1, upperBound(offsets, lo) - 1)) + end = Math.max(start + 1, Math.min(n, upperBound(offsets, hi))) } } @@ -183,7 +204,7 @@ export function useVirtualHistory( // Give the renderer the mounted-row coverage for passive scroll clamping. // Without this, burst wheel/page scroll can race past the React commit that // updates the virtual range and paint spacer-only frames. - if (s && shouldSetVirtualClamp({ itemCount: n, sticky, viewportHeight: vp })) { + if (s && shouldSetVirtualClamp({ itemCount: n, liveTailActive, sticky, viewportHeight: vp })) { const min = offsets[start] ?? 0 const max = Math.max(min, (offsets[end] ?? 
total) - vp) s.setClampBounds(min, max) @@ -235,7 +256,7 @@ export function useVirtualHistory( if (dirty) { setVer(v => v + 1) } - }, [end, hasScrollRef, items, n, offsets, scrollRef, start, sticky, total, vp]) + }, [end, hasScrollRef, items, liveTailActive, n, offsets, recentManual, scrollRef, start, sticky, total, vp]) return { bottomSpacer: Math.max(0, total - (offsets[end] ?? total)), diff --git a/ui-tui/src/lib/todo.test.ts b/ui-tui/src/lib/todo.test.ts new file mode 100644 index 00000000..38d95c9e --- /dev/null +++ b/ui-tui/src/lib/todo.test.ts @@ -0,0 +1,12 @@ +import { describe, expect, it } from 'vitest' + +import { todoGlyph } from './todo.js' + +describe('todoGlyph', () => { + it('uses fixed-width ASCII markers so the active row does not render wide or emoji-like', () => { + expect(todoGlyph('completed')).toBe('[x]') + expect(todoGlyph('in_progress')).toBe('[>]') + expect(todoGlyph('pending')).toBe('[ ]') + expect(todoGlyph('cancelled')).toBe('[-]') + }) +}) diff --git a/ui-tui/src/lib/todo.ts b/ui-tui/src/lib/todo.ts new file mode 100644 index 00000000..b6dc4896 --- /dev/null +++ b/ui-tui/src/lib/todo.ts @@ -0,0 +1,4 @@ +import type { TodoItem } from '../types.js' + +export const todoGlyph = (status: TodoItem['status']) => + status === 'completed' ? '[x]' : status === 'cancelled' ? '[-]' : status === 'in_progress' ? 
'[>]' : '[ ]' diff --git a/ui-tui/src/types.ts b/ui-tui/src/types.ts index 3fdb39b8..89c83856 100644 --- a/ui-tui/src/types.ts +++ b/ui-tui/src/types.ts @@ -5,6 +5,12 @@ export interface ActiveTool { startedAt?: number } +export interface TodoItem { + content: string + id: string + status: 'cancelled' | 'completed' | 'in_progress' | 'pending' +} + export interface ActivityItem { id: number text: string @@ -133,8 +139,11 @@ export interface McpServerStatus { export interface SessionInfo { cwd?: string + fast?: boolean mcp_servers?: McpServerStatus[] model: string + reasoning_effort?: string + service_tier?: string release_date?: string skills: Record tools: Record diff --git a/ui-tui/src/types/hermes-ink.d.ts b/ui-tui/src/types/hermes-ink.d.ts index 497bf54b..c878bdb4 100644 --- a/ui-tui/src/types/hermes-ink.d.ts +++ b/ui-tui/src/types/hermes-ink.d.ts @@ -57,6 +57,7 @@ declare module '@hermes/ink' { readonly getScrollHeight: () => number readonly getViewportHeight: () => number readonly getViewportTop: () => number + readonly getLastManualScrollAt: () => number readonly isSticky: () => boolean readonly subscribe: (listener: () => void) => () => void readonly setClampBounds: (min: number | undefined, max: number | undefined) => void From 3271ffbd80f43d149d4939d0897ba80a971bcd53 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:27:31 -0500 Subject: [PATCH 37/87] fix(tui): pin todo panel above live output --- ui-tui/src/components/appLayout.tsx | 4 ++-- ui-tui/src/components/todoPanel.tsx | 23 +++++++++++------------ ui-tui/src/lib/liveLayout.test.ts | 9 +++++++++ ui-tui/src/lib/liveLayout.ts | 1 + ui-tui/src/lib/todo.test.ts | 11 ++++++++++- ui-tui/src/lib/todo.ts | 5 +++++ 6 files changed, 38 insertions(+), 15 deletions(-) create mode 100644 ui-tui/src/lib/liveLayout.test.ts create mode 100644 ui-tui/src/lib/liveLayout.ts diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index fe370700..d3d70235 100644 
--- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -30,6 +30,8 @@ const TranscriptPane = memo(function TranscriptPane({ <> + + {transcript.virtualHistory.topSpacer > 0 ? : null} {transcript.virtualRows.slice(transcript.virtualHistory.start, transcript.virtualHistory.end).map(row => ( @@ -58,8 +60,6 @@ const TranscriptPane = memo(function TranscriptPane({ {transcript.virtualHistory.bottomSpacer > 0 ? : null} - - { + const tone = todoTone(status) + + return tone === 'active' ? t.color.cornsilk : tone === 'body' ? t.color.statusFg : t.color.dim +} + export const TodoPanel = memo(function TodoPanel({ t, todos }: { t: Theme; todos: TodoItem[] }) { if (!todos.length) { return null @@ -23,19 +29,12 @@ export const TodoPanel = memo(function TodoPanel({ t, todos }: { t: Theme; todos {todos.map(todo => { - const done = todo.status === 'completed' - const cancel = todo.status === 'cancelled' - const active = todo.status === 'in_progress' + const tone = todoTone(todo.status) + const color = rowColor(t, todo.status) return ( - - - {todoGlyph(todo.status)}{' '} - + + {todoGlyph(todo.status)} {todo.content} ) diff --git a/ui-tui/src/lib/liveLayout.test.ts b/ui-tui/src/lib/liveLayout.test.ts new file mode 100644 index 00000000..3d40f6f8 --- /dev/null +++ b/ui-tui/src/lib/liveLayout.test.ts @@ -0,0 +1,9 @@ +import { describe, expect, it } from 'vitest' + +import { liveTailOrder } from './liveLayout.js' + +describe('liveTailOrder', () => { + it('keeps todo before transcript and assistant live output', () => { + expect(liveTailOrder()).toEqual(['todo', 'history', 'assistant']) + }) +}) diff --git a/ui-tui/src/lib/liveLayout.ts b/ui-tui/src/lib/liveLayout.ts new file mode 100644 index 00000000..1107edfc --- /dev/null +++ b/ui-tui/src/lib/liveLayout.ts @@ -0,0 +1 @@ +export const liveTailOrder = () => ['todo', 'history', 'assistant'] as const diff --git a/ui-tui/src/lib/todo.test.ts b/ui-tui/src/lib/todo.test.ts index 38d95c9e..bf8befa2 100644 --- 
a/ui-tui/src/lib/todo.test.ts +++ b/ui-tui/src/lib/todo.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it } from 'vitest' -import { todoGlyph } from './todo.js' +import { todoGlyph, todoTone } from './todo.js' describe('todoGlyph', () => { it('uses fixed-width ASCII markers so the active row does not render wide or emoji-like', () => { @@ -10,3 +10,12 @@ describe('todoGlyph', () => { expect(todoGlyph('cancelled')).toBe('[-]') }) }) + +describe('todoTone', () => { + it('keeps todo status rows neutral instead of red/green', () => { + expect(todoTone('completed')).toBe('dim') + expect(todoTone('cancelled')).toBe('dim') + expect(todoTone('pending')).toBe('body') + expect(todoTone('in_progress')).toBe('active') + }) +}) diff --git a/ui-tui/src/lib/todo.ts b/ui-tui/src/lib/todo.ts index b6dc4896..1846d02f 100644 --- a/ui-tui/src/lib/todo.ts +++ b/ui-tui/src/lib/todo.ts @@ -1,4 +1,9 @@ import type { TodoItem } from '../types.js' +export type TodoTone = 'active' | 'body' | 'dim' + export const todoGlyph = (status: TodoItem['status']) => status === 'completed' ? '[x]' : status === 'cancelled' ? '[-]' : status === 'in_progress' ? '[>]' : '[ ]' + +export const todoTone = (status: TodoItem['status']): TodoTone => + status === 'in_progress' ? 'active' : status === 'pending' ? 
'body' : 'dim' From cf8439263ae4d85102176f1bc170624faec6ebed Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:33:01 -0500 Subject: [PATCH 38/87] fix(tui): keep todo pinned outside transcript --- ui-tui/src/app/useMainApp.ts | 6 +++++- ui-tui/src/components/appLayout.tsx | 4 ++-- ui-tui/src/lib/liveLayout.test.ts | 2 +- ui-tui/src/lib/liveLayout.ts | 2 +- ui-tui/src/lib/messages.test.ts | 23 +++++++++++++++++++++++ ui-tui/src/lib/messages.ts | 13 +++++++++++++ 6 files changed, 45 insertions(+), 5 deletions(-) create mode 100644 ui-tui/src/lib/messages.test.ts diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 262b400f..064d64ad 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -16,6 +16,7 @@ import type { } from '../gatewayTypes.js' import { useGitBranch } from '../hooks/useGitBranch.js' import { useVirtualHistory } from '../hooks/useVirtualHistory.js' +import { appendTranscriptMessage } from '../lib/messages.js' import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js' import { terminalParityHints } from '../lib/terminalParity.js' import { buildToolTrailLine, sameToolTrailGroup, toolTrailLabel } from '../lib/text.js' @@ -198,7 +199,10 @@ export function useMainApp(gw: GatewayClient) { [selection] ) - const appendMessage = useCallback((msg: Msg) => setHistoryItems(prev => capHistory([...prev, msg])), []) + const appendMessage = useCallback( + (msg: Msg) => setHistoryItems(prev => capHistory(appendTranscriptMessage(prev, msg))), + [] + ) const sys = useCallback((text: string) => appendMessage({ role: 'system', text }), [appendMessage]) diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index d3d70235..a6862027 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -28,10 +28,10 @@ const TranscriptPane = memo(function TranscriptPane({ return ( <> + + - - {transcript.virtualHistory.topSpacer > 0 ? 
: null} {transcript.virtualRows.slice(transcript.virtualHistory.start, transcript.virtualHistory.end).map(row => ( diff --git a/ui-tui/src/lib/liveLayout.test.ts b/ui-tui/src/lib/liveLayout.test.ts index 3d40f6f8..24426efe 100644 --- a/ui-tui/src/lib/liveLayout.test.ts +++ b/ui-tui/src/lib/liveLayout.test.ts @@ -4,6 +4,6 @@ import { liveTailOrder } from './liveLayout.js' describe('liveTailOrder', () => { it('keeps todo before transcript and assistant live output', () => { - expect(liveTailOrder()).toEqual(['todo', 'history', 'assistant']) + expect(liveTailOrder()).toEqual(['todo', 'scroll-history', 'assistant']) }) }) diff --git a/ui-tui/src/lib/liveLayout.ts b/ui-tui/src/lib/liveLayout.ts index 1107edfc..a990b06d 100644 --- a/ui-tui/src/lib/liveLayout.ts +++ b/ui-tui/src/lib/liveLayout.ts @@ -1 +1 @@ -export const liveTailOrder = () => ['todo', 'history', 'assistant'] as const +export const liveTailOrder = () => ['todo', 'scroll-history', 'assistant'] as const diff --git a/ui-tui/src/lib/messages.test.ts b/ui-tui/src/lib/messages.test.ts new file mode 100644 index 00000000..6194311c --- /dev/null +++ b/ui-tui/src/lib/messages.test.ts @@ -0,0 +1,23 @@ +import { describe, expect, it } from 'vitest' + +import { appendTranscriptMessage } from './messages.js' + +describe('appendTranscriptMessage', () => { + it('merges adjacent tool-only shelves into one transcript row', () => { + const out = appendTranscriptMessage( + [{ kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓'] }], + { kind: 'trail', role: 'system', text: '', tools: ['Terminal("two") ✓'] } + ) + + expect(out).toEqual([{ kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓', 'Terminal("two") ✓'] }]) + }) + + it('does not merge tool shelves across thinking text', () => { + const out = appendTranscriptMessage( + [{ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['Terminal("one") ✓'] }], + { kind: 'trail', role: 'system', text: '', tools: ['Terminal("two") ✓'] 
} + ) + + expect(out).toHaveLength(2) + }) +}) diff --git a/ui-tui/src/lib/messages.ts b/ui-tui/src/lib/messages.ts index a459ec5a..60fc4b76 100644 --- a/ui-tui/src/lib/messages.ts +++ b/ui-tui/src/lib/messages.ts @@ -1,4 +1,17 @@ import type { Msg, Role } from '../types.js' +const isToolShelf = (msg: Msg | undefined) => + Boolean(msg?.kind === 'trail' && !msg.text && !msg.thinking?.trim() && msg.tools?.length) + +export const appendTranscriptMessage = (prev: Msg[], msg: Msg): Msg[] => { + if (isToolShelf(msg) && isToolShelf(prev.at(-1))) { + const last = prev.at(-1)! + + return [...prev.slice(0, -1), { ...last, tools: [...(last.tools ?? []), ...(msg.tools ?? [])] }] + } + + return [...prev, msg] +} + export const upsert = (prev: Msg[], role: Role, text: string): Msg[] => prev.at(-1)?.role === role ? [...prev.slice(0, -1), { role, text }] : [...prev, { role, text }] From cee4036e8b434e8821bb0d19b574eab8b67c29c0 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:35:38 -0500 Subject: [PATCH 39/87] fix(tui): merge tool shelves in transcript --- ui-tui/src/components/appLayout.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index a6862027..5c63c0e2 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -28,7 +28,9 @@ const TranscriptPane = memo(function TranscriptPane({ return ( <> - + + + From 64de685d3ff4320d1284b91a100770ab4dd4c233 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:35:41 -0500 Subject: [PATCH 40/87] test(tui): remove stale turn freeze experiment --- ui-tui/src/__tests__/turnStore.test.ts | 27 -------------------------- 1 file changed, 27 deletions(-) delete mode 100644 ui-tui/src/__tests__/turnStore.test.ts diff --git a/ui-tui/src/__tests__/turnStore.test.ts b/ui-tui/src/__tests__/turnStore.test.ts deleted file mode 100644 index 13cd0f64..00000000 --- 
a/ui-tui/src/__tests__/turnStore.test.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { describe, expect, it } from 'vitest' - -import { - freezeTurnRendering, - getRenderableTurnState, - patchTurnState, - resetTurnState, - unfreezeTurnRendering -} from '../app/turnStore.js' - -describe('turn render freezing', () => { - it('holds the render snapshot stable while live turn state keeps changing', () => { - resetTurnState() - patchTurnState({ streaming: 'before scroll' }) - freezeTurnRendering() - - patchTurnState({ reasoning: 'new thinking', streaming: 'new streamed text' }) - - expect(getRenderableTurnState().streaming).toBe('before scroll') - expect(getRenderableTurnState().reasoning).toBe('') - - unfreezeTurnRendering() - - expect(getRenderableTurnState().streaming).toBe('new streamed text') - expect(getRenderableTurnState().reasoning).toBe('new thinking') - }) -}) From 6a3873942fef72c871c5f3eae38126e24853db14 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:38:18 -0500 Subject: [PATCH 41/87] fix(tui): format thinking paragraphs --- ui-tui/src/__tests__/text.test.ts | 14 +++++++++++++- ui-tui/src/lib/text.ts | 19 +++++++++++++------ 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/ui-tui/src/__tests__/text.test.ts b/ui-tui/src/__tests__/text.test.ts index 1690996d..a81baa0f 100644 --- a/ui-tui/src/__tests__/text.test.ts +++ b/ui-tui/src/__tests__/text.test.ts @@ -12,7 +12,8 @@ import { parseToolTrailResultLine, pasteTokenLabel, sameToolTrailGroup, - splitToolDuration + splitToolDuration, + thinkingPreview } from '../lib/text.js' describe('isToolTrailResultLine', () => { @@ -82,6 +83,17 @@ describe('estimateTokensRough', () => { }) }) +describe('thinkingPreview', () => { + it('adds paragraph breaks before markdown thinking headings', () => { + const raw = + '**Considering user instructions**\nI need to answer.**Planning tool execution**\nI can run tools.**Determining weather search parameters**\nUse SF.' 
+ + expect(thinkingPreview(raw, 'full')).toBe( + '**Considering user instructions**\nI need to answer.\n\n**Planning tool execution**\nI can run tools.\n\n**Determining weather search parameters**\nUse SF.' + ) + }) +}) + describe('boundedLiveRenderText', () => { it('preserves short live text verbatim', () => { expect(boundedLiveRenderText('one\ntwo', { maxChars: 100, maxLines: 10 })).toBe('one\ntwo') diff --git a/ui-tui/src/lib/text.ts b/ui-tui/src/lib/text.ts index 256cbc0f..9c9758c3 100644 --- a/ui-tui/src/lib/text.ts +++ b/ui-tui/src/lib/text.ts @@ -74,14 +74,21 @@ export const pasteTokenLabel = (text: string, lineCount: number) => { const THINKING_STATUS_RE = new RegExp(`^(?:${VERBS.join('|')})\\.{0,3}$`, 'i') const THINKING_STATUS_CHUNK_RE = new RegExp(`[^A-Za-z\n]+\\s*(?:${VERBS.join('|')})\\.{0,3}\\s*`, 'giu') -export const cleanThinkingText = (reasoning: string) => - reasoning - .split('\n') - .map(line => line.replace(THINKING_STATUS_CHUNK_RE, '').trim()) - .filter(line => line && !THINKING_STATUS_RE.test(line.replace(/\.\.\.$/, '').trim())) - .join('\n') +const normalizeThinkingParagraphs = (text: string) => + text + .replace(/([^\n])(?=\*\*[^*\n][^\n]*?\*\*)/g, '$1\n\n') + .replace(/\n{3,}/g, '\n\n') .trim() +export const cleanThinkingText = (reasoning: string) => + normalizeThinkingParagraphs( + reasoning + .split('\n') + .map(line => line.replace(THINKING_STATUS_CHUNK_RE, '').trim()) + .filter(line => line && !THINKING_STATUS_RE.test(line.replace(/\.\.\.$/, '').trim())) + .join('\n') + ) + export const thinkingPreview = (reasoning: string, mode: ThinkingMode, max: number = THINKING_COT_MAX) => { const raw = cleanThinkingText(reasoning) From f6846205cce3d5ad54952dc70cd3f77c09cc165d Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:40:38 -0500 Subject: [PATCH 42/87] fix(tui): isolate turn state from app render --- ui-tui/src/__tests__/stateIsolation.test.ts | 46 +++++++++++++++++++++ ui-tui/src/app/useLongRunToolCharms.ts | 17 
++++++-- ui-tui/src/app/useMainApp.ts | 6 +-- 3 files changed, 61 insertions(+), 8 deletions(-) create mode 100644 ui-tui/src/__tests__/stateIsolation.test.ts diff --git a/ui-tui/src/__tests__/stateIsolation.test.ts b/ui-tui/src/__tests__/stateIsolation.test.ts new file mode 100644 index 00000000..0a6b898f --- /dev/null +++ b/ui-tui/src/__tests__/stateIsolation.test.ts @@ -0,0 +1,46 @@ +import { beforeEach, describe, expect, it } from 'vitest' + +import { patchTurnState, resetTurnState } from '../app/turnStore.js' +import { $uiState, resetUiState } from '../app/uiStore.js' + +const shallowEqual = >(a: T, b: T) => + Object.keys(a).length === Object.keys(b).length && Object.keys(a).every(key => Object.is(a[key], b[key])) + +const subscribeSelected = >(selector: () => T) => { + let current = selector() + let calls = 0 + + const unsubscribe = $uiState.listen(() => { + const next = selector() + + if (shallowEqual(next, current)) { + return + } + + current = next + calls++ + }) + + return { calls: () => calls, unsubscribe } +} + +describe('TUI state isolation', () => { + beforeEach(() => { + resetUiState() + resetTurnState() + }) + + it('does not notify ui/composer subscribers for high-frequency turn updates', () => { + const composerRelevant = subscribeSelected(() => ({ busy: $uiState.get().busy, sid: $uiState.get().sid })) + + try { + for (let i = 0; i < 50; i++) { + patchTurnState({ streaming: `token ${i}` }) + } + } finally { + composerRelevant.unsubscribe() + } + + expect(composerRelevant.calls()).toBe(0) + }) +}) diff --git a/ui-tui/src/app/useLongRunToolCharms.ts b/ui-tui/src/app/useLongRunToolCharms.ts index a65898db..9135abf4 100644 --- a/ui-tui/src/app/useLongRunToolCharms.ts +++ b/ui-tui/src/app/useLongRunToolCharms.ts @@ -5,6 +5,8 @@ import { pick, toolTrailLabel } from '../lib/text.js' import type { ActiveTool } from '../types.js' import { turnController } from './turnController.js' +import { useTurnSelector } from './turnStore.js' +import { getUiState } 
from './uiStore.js' const DELAY_MS = 8_000 const INTERVAL_MS = 10_000 @@ -15,21 +17,28 @@ interface Slot { lastAt: number } -export function useLongRunToolCharms(busy: boolean, tools: ActiveTool[]) { +export function useLongRunToolCharms() { + const tools = useTurnSelector(state => state.tools) const slots = useRef(new Map()) useEffect(() => { - if (!busy || !tools.length) { + if (!getUiState().busy || !tools.length) { slots.current.clear() return } const tick = () => { + if (!getUiState().busy) { + slots.current.clear() + + return + } + const now = Date.now() const liveIds = new Set(tools.map(t => t.id)) - for (const key of [...slots.current.keys()]) { + for (const key of Array.from(slots.current.keys())) { if (!liveIds.has(key)) { slots.current.delete(key) } @@ -57,5 +66,5 @@ export function useLongRunToolCharms(busy: boolean, tools: ActiveTool[]) { const id = setInterval(tick, 1000) return () => clearInterval(id) - }, [busy, tools]) + }, [tools]) } diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 064d64ad..26431264 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -29,7 +29,7 @@ import { type GatewayRpc, type TranscriptRow } from './interfaces.js' import { $overlayState, patchOverlayState } from './overlayStore.js' import { scrollWithSelectionBy } from './scroll.js' import { turnController } from './turnController.js' -import { $turnState, patchTurnState, useTurnSelector } from './turnStore.js' +import { patchTurnState, useTurnSelector } from './turnStore.js' import { $uiState, getUiState, patchUiState } from './uiStore.js' import { useComposerState } from './useComposerState.js' import { useConfigSync } from './useConfigSync.js' @@ -107,8 +107,6 @@ export function useMainApp(gw: GatewayClient) { const ui = useStore($uiState) const overlay = useStore($overlayState) - const turn = useStore($turnState) - const turnLiveTailActive = useTurnSelector(state => Boolean( state.streaming || @@ -503,7 +501,7 @@ 
export function useMainApp(gw: GatewayClient) { } }, [gw, sys]) - useLongRunToolCharms(ui.busy, turn.tools) + useLongRunToolCharms() const slash = useMemo( () => From a30db69dd576f874b0e821fc3af4d3d421134057 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:42:07 -0500 Subject: [PATCH 43/87] chore(tui): clean live progress lint --- ui-tui/src/app/useLongRunToolCharms.ts | 1 - ui-tui/src/app/useMainApp.ts | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/ui-tui/src/app/useLongRunToolCharms.ts b/ui-tui/src/app/useLongRunToolCharms.ts index 9135abf4..5d2f0d66 100644 --- a/ui-tui/src/app/useLongRunToolCharms.ts +++ b/ui-tui/src/app/useLongRunToolCharms.ts @@ -2,7 +2,6 @@ import { useEffect, useRef } from 'react' import { LONG_RUN_CHARMS } from '../content/charms.js' import { pick, toolTrailLabel } from '../lib/text.js' -import type { ActiveTool } from '../types.js' import { turnController } from './turnController.js' import { useTurnSelector } from './turnStore.js' diff --git a/ui-tui/src/app/useMainApp.ts b/ui-tui/src/app/useMainApp.ts index 26431264..f3967c96 100644 --- a/ui-tui/src/app/useMainApp.ts +++ b/ui-tui/src/app/useMainApp.ts @@ -107,6 +107,7 @@ export function useMainApp(gw: GatewayClient) { const ui = useStore($uiState) const overlay = useStore($overlayState) + const turnLiveTailActive = useTurnSelector(state => Boolean( state.streaming || From 1566f1eeccfffd3b72ac70777d70014bd050084a Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:55:01 -0500 Subject: [PATCH 44/87] fix(tui): report actual session on exit --- hermes_cli/main.py | 29 +++++++++++++-- tests/hermes_cli/test_tui_resume_flow.py | 35 +++++++++++++++++++ .../src/__tests__/useSessionLifecycle.test.ts | 27 ++++++++++++++ ui-tui/src/app/useSessionLifecycle.ts | 16 +++++++++ 4 files changed, 104 insertions(+), 3 deletions(-) create mode 100644 ui-tui/src/__tests__/useSessionLifecycle.test.ts diff --git a/hermes_cli/main.py 
b/hermes_cli/main.py index e10af44c..96874570 100644 --- a/hermes_cli/main.py +++ b/hermes_cli/main.py @@ -44,6 +44,7 @@ Usage: """ import argparse +import json import os import shutil import subprocess @@ -760,9 +761,20 @@ def _resolve_session_by_name_or_id(name_or_id: str) -> Optional[str]: return None -def _print_tui_exit_summary(session_id: Optional[str]) -> None: +def _read_tui_active_session_file(path: Optional[str]) -> Optional[str]: + if not path: + return None + try: + data = json.loads(Path(path).read_text(encoding="utf-8")) + sid = str(data.get("session_id") or "").strip() + return sid or None + except Exception: + return None + + +def _print_tui_exit_summary(session_id: Optional[str], active_session_file: Optional[str] = None) -> None: """Print a shell-visible epilogue after TUI exits.""" - target = session_id or _resolve_last_session(source="tui") + target = _read_tui_active_session_file(active_session_file) or session_id or _resolve_last_session(source="tui") if not target: return @@ -1037,7 +1049,13 @@ def _launch_tui( """Replace current process with the TUI.""" tui_dir = PROJECT_ROOT / "ui-tui" + import tempfile + env = os.environ.copy() + active_session_file = os.path.join( + tempfile.gettempdir(), f"hermes-tui-active-session-{os.getpid()}.json" + ) + env["HERMES_TUI_ACTIVE_SESSION_FILE"] = active_session_file env["HERMES_PYTHON_SRC_ROOT"] = os.environ.get( "HERMES_PYTHON_SRC_ROOT", str(PROJECT_ROOT) ) @@ -1070,7 +1088,12 @@ def _launch_tui( code = 130 if code in (0, 130): - _print_tui_exit_summary(resume_session_id) + _print_tui_exit_summary(resume_session_id, active_session_file) + + try: + os.unlink(active_session_file) + except OSError: + pass sys.exit(code) diff --git a/tests/hermes_cli/test_tui_resume_flow.py b/tests/hermes_cli/test_tui_resume_flow.py index 6044b04a..a8a2d3aa 100644 --- a/tests/hermes_cli/test_tui_resume_flow.py +++ b/tests/hermes_cli/test_tui_resume_flow.py @@ -177,3 +177,38 @@ def 
test_print_tui_exit_summary_includes_resume_and_token_totals(monkeypatch, ca assert "hermes --tui --resume 20260409_000001_abc123" in out assert 'hermes --tui -c "demo title"' in out assert "Tokens: 21 (in 10, out 6, cache 4, reasoning 1)" in out + + +def test_print_tui_exit_summary_prefers_actual_active_session_file(monkeypatch, capsys, tmp_path): + import hermes_cli.main as main_mod + + seen = [] + + class _FakeDB: + def get_session(self, session_id): + seen.append(session_id) + return { + "message_count": 1, + "input_tokens": 0, + "output_tokens": 0, + "cache_read_tokens": 0, + "cache_write_tokens": 0, + "reasoning_tokens": 0, + } + + def get_session_title(self, _session_id): + return "actual" + + def close(self): + return None + + active = tmp_path / "active.json" + active.write_text('{"session_id":"actual_session"}', encoding="utf-8") + monkeypatch.setitem(sys.modules, "hermes_state", types.SimpleNamespace(SessionDB=lambda: _FakeDB())) + + main_mod._print_tui_exit_summary("startup_resume", str(active)) + out = capsys.readouterr().out + + assert seen == ["actual_session"] + assert "hermes --tui --resume actual_session" in out + assert "startup_resume" not in out diff --git a/ui-tui/src/__tests__/useSessionLifecycle.test.ts b/ui-tui/src/__tests__/useSessionLifecycle.test.ts new file mode 100644 index 00000000..8d797742 --- /dev/null +++ b/ui-tui/src/__tests__/useSessionLifecycle.test.ts @@ -0,0 +1,27 @@ +import { mkdtempSync, readFileSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' + +import { afterEach, describe, expect, it } from 'vitest' + +import { writeActiveSessionFile } from '../app/useSessionLifecycle.js' + +describe('writeActiveSessionFile', () => { + let dir = '' + + afterEach(() => { + if (dir) { + rmSync(dir, { force: true, recursive: true }) + dir = '' + } + }) + + it('writes the actual resumed session id for the shell exit summary', () => { + dir = mkdtempSync(join(tmpdir(), 'hermes-tui-active-')) + 
const path = join(dir, 'active.json') + + writeActiveSessionFile('actual_session', path) + + expect(JSON.parse(readFileSync(path, 'utf8'))).toEqual({ session_id: 'actual_session' }) + }) +}) diff --git a/ui-tui/src/app/useSessionLifecycle.ts b/ui-tui/src/app/useSessionLifecycle.ts index baaf3fc3..b475533a 100644 --- a/ui-tui/src/app/useSessionLifecycle.ts +++ b/ui-tui/src/app/useSessionLifecycle.ts @@ -1,3 +1,5 @@ +import { writeFileSync } from 'node:fs' + import type { ScrollBoxHandle } from '@hermes/ink' import { type RefObject, useCallback } from 'react' @@ -22,6 +24,18 @@ import { getUiState, patchUiState } from './uiStore.js' const usageFrom = (info: null | SessionInfo): Usage => (info?.usage ? { ...ZERO, ...info.usage } : ZERO) +export const writeActiveSessionFile = (sessionId: null | string, file = process.env.HERMES_TUI_ACTIVE_SESSION_FILE) => { + if (!file || !sessionId) { + return + } + + try { + writeFileSync(file, JSON.stringify({ session_id: sessionId }), { mode: 0o600 }) + } catch { + // Best-effort shell epilogue hint only; never break live session changes. + } +} + const trimTail = (items: Msg[]) => { const q = [...items] @@ -127,6 +141,7 @@ export function useSessionLifecycle(opts: UseSessionLifecycleOptions) { resetSession() setSessionStartedAt(Date.now()) + writeActiveSessionFile(r.session_id) patchUiState({ info, sid: r.session_id, @@ -184,6 +199,7 @@ export function useSessionLifecycle(opts: UseSessionLifecycleOptions) { const resumed = toTranscriptMessages(r.messages) setHistoryItems(r.info ? [introMsg(r.info), ...resumed] : resumed) + writeActiveSessionFile(r.resumed ?? r.session_id) patchUiState({ info: r.info ?? 
null, sid: r.session_id, From f5552f92e2b935a2f404cb7cee3179927ab143fd Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:55:38 -0500 Subject: [PATCH 45/87] fix(tui): stabilize live todo progress --- ui-tui/src/__tests__/turnStore.test.ts | 60 +++++++++++++++++++ ui-tui/src/app/createGatewayEventHandler.ts | 2 + ui-tui/src/app/turnController.ts | 63 +++++++------------- ui-tui/src/app/turnStore.ts | 22 +++++++ ui-tui/src/components/appLayout.tsx | 8 +-- ui-tui/src/components/messageLine.tsx | 5 ++ ui-tui/src/components/streamingAssistant.tsx | 5 +- ui-tui/src/components/todoPanel.tsx | 59 +++++++++++------- ui-tui/src/lib/liveLayout.ts | 2 +- ui-tui/src/lib/liveProgress.test.ts | 48 +++++++++++++++ ui-tui/src/lib/liveProgress.ts | 34 +++++++++++ ui-tui/src/lib/messages.test.ts | 20 ++++--- ui-tui/src/lib/messages.ts | 13 +--- ui-tui/src/types.ts | 1 + 14 files changed, 256 insertions(+), 86 deletions(-) create mode 100644 ui-tui/src/__tests__/turnStore.test.ts create mode 100644 ui-tui/src/lib/liveProgress.test.ts create mode 100644 ui-tui/src/lib/liveProgress.ts diff --git a/ui-tui/src/__tests__/turnStore.test.ts b/ui-tui/src/__tests__/turnStore.test.ts new file mode 100644 index 00000000..006a1288 --- /dev/null +++ b/ui-tui/src/__tests__/turnStore.test.ts @@ -0,0 +1,60 @@ +import { beforeEach, describe, expect, it } from 'vitest' + +import { + appendTurnSegment, + archiveDoneTodos, + getTurnState, + patchTurnState, + resetTurnState, + toggleTodoCollapsed +} from '../app/turnStore.js' + +describe('turnStore live progress helpers', () => { + beforeEach(() => resetTurnState()) + + it('archives completed todos into a transcript trail and clears the live anchor', () => { + patchTurnState({ + todos: [ + { content: 'prep', id: 'prep', status: 'completed' }, + { content: 'serve', id: 'serve', status: 'completed' } + ] + }) + + expect(archiveDoneTodos()).toEqual([ + { + kind: 'trail', + role: 'system', + text: '', + todos: [ + { content: 'prep', 
id: 'prep', status: 'completed' }, + { content: 'serve', id: 'serve', status: 'completed' } + ] + } + ]) + expect(getTurnState().todos).toEqual([]) + }) + + it('does not archive active todos', () => { + patchTurnState({ todos: [{ content: 'cook', id: 'cook', status: 'in_progress' }] }) + + expect(archiveDoneTodos()).toEqual([]) + expect(getTurnState().todos).toHaveLength(1) + }) + + it('tracks collapsed state independently of todo content', () => { + toggleTodoCollapsed() + expect(getTurnState().todoCollapsed).toBe(true) + + toggleTodoCollapsed() + expect(getTurnState().todoCollapsed).toBe(false) + }) + + it('merges adjacent live tool shelves before rendering', () => { + appendTurnSegment({ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }) + appendTurnSegment({ kind: 'trail', role: 'system', text: '', tools: ['two ✓'] }) + + expect(getTurnState().streamSegments).toEqual([ + { kind: 'trail', role: 'system', text: '', tools: ['one ✓', 'two ✓'] } + ]) + }) +}) diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index 267bf8c1..d1e9d633 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -11,6 +11,7 @@ import { applyDelegationStatus, getDelegationState } from './delegationStore.js' import type { GatewayEventHandlerContext } from './interfaces.js' import { patchOverlayState } from './overlayStore.js' import { turnController } from './turnController.js' +import { archiveDoneTodos } from './turnStore.js' import { getUiState, patchUiState } from './uiStore.js' const NO_PROVIDER_RE = /\bNo (?:LLM|inference) provider configured\b/i @@ -538,6 +539,7 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: if (!wasInterrupted) { const msgs: Msg[] = finalMessages.length ? 
finalMessages : [{ role: 'assistant', text: finalText }] msgs.forEach(appendMessage) + archiveDoneTodos().forEach(appendMessage) if (bellOnComplete && stdout?.isTTY) { stdout.write('\x07') diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 8d9d2e13..9bc87ea8 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -7,6 +7,7 @@ import { } from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' +import { appendToolShelfMessage, isToolShelfMessage } from '../lib/liveProgress.js' import { boundedLiveRenderText, buildToolTrailLine, @@ -19,7 +20,7 @@ import type { ActiveTool, ActivityItem, Msg, SubagentProgress, TodoItem } from ' import { resetFlowOverlays } from './overlayStore.js' import { pushSnapshot } from './spawnHistoryStore.js' -import { getTurnState, patchTurnState, resetTurnState } from './turnStore.js' +import { archiveDoneTodos, getTurnState, patchTurnState, resetTurnState } from './turnStore.js' import { getUiState, patchUiState } from './uiStore.js' const INTERRUPT_COOLDOWN_MS = 1500 @@ -42,20 +43,6 @@ const diffSegmentBody = (msg: Msg): null | string => { const hasDetails = (msg: Msg): boolean => Boolean(msg.thinking || msg.tools?.length || msg.toolTokens) -const isToolOnly = (msg: Msg | undefined) => - Boolean(msg && msg.kind === 'trail' && !msg.thinking?.trim() && !msg.text && msg.tools?.length) - -const mergeSequentialToolOnly = (segments: Msg[]) => - segments.reduce((acc, msg) => { - if (isToolOnly(msg) && isToolOnly(acc.at(-1))) { - const prev = acc.at(-1)! - - return [...acc.slice(0, -1), { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? 
[])] }] - } - - return [...acc, msg] - }, []) - const isTodoStatus = (status: unknown): status is TodoItem['status'] => status === 'pending' || status === 'in_progress' || status === 'completed' || status === 'cancelled' @@ -281,17 +268,7 @@ class TurnController { } private pushSegment(msg: Msg) { - if (isToolOnly(msg) && isToolOnly(this.segmentMessages.at(-1)!)) { - const prev = this.segmentMessages.at(-1)! - this.segmentMessages = [ - ...this.segmentMessages.slice(0, -1), - { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? [])] } - ] - - return - } - - this.segmentMessages = [...this.segmentMessages, msg] + this.segmentMessages = appendToolShelfMessage(this.segmentMessages, msg) } flushStreamingSegment() { @@ -347,16 +324,22 @@ class TurnController { } private flushPendingToolsIntoLastSegment() { - const last = this.segmentMessages[this.segmentMessages.length - 1] - - if (!this.pendingSegmentTools.length || !isToolOnly(last)) { + if (!this.pendingSegmentTools.length) { return false } - this.segmentMessages = [ - ...this.segmentMessages.slice(0, -1), - { ...last, tools: [...(last.tools ?? []), ...this.pendingSegmentTools] } - ] + const next = appendToolShelfMessage(this.segmentMessages, { + kind: 'trail', + role: 'system', + text: '', + tools: this.pendingSegmentTools + }) + + if (next.length === this.segmentMessages.length + 1) { + return false + } + + this.segmentMessages = next this.pendingSegmentTools = [] patchTurnState({ streamPendingTools: [], streamSegments: this.segmentMessages }) @@ -449,7 +432,7 @@ class TurnController { let tools = this.pendingSegmentTools const last = this.segmentMessages[this.segmentMessages.length - 1] - if (tools.length && isToolOnly(last)) { + if (tools.length && isToolShelfMessage(last)) { this.segmentMessages = [ ...this.segmentMessages.slice(0, -1), { ...last, tools: [...(last.tools ?? []), ...tools] } @@ -465,13 +448,11 @@ class TurnController { // assistant narration stays put. 
const finalHasOwnDiffFence = /```(?:diff|patch)\b/i.test(finalText) - const segments = mergeSequentialToolOnly( - this.segmentMessages.filter(msg => { - const body = diffSegmentBody(msg) + const segments = this.segmentMessages.filter(msg => { + const body = diffSegmentBody(msg) - return body === null || (!finalHasOwnDiffFence && !finalText.includes(body)) - }) - ) + return body === null || (!finalHasOwnDiffFence && !finalText.includes(body)) + }) const hasReasoningSegment = this.reasoningSegmentIndex !== null || segments.some(msg => Boolean(msg.thinking?.trim())) @@ -490,6 +471,8 @@ class TurnController { const finalMessages = hasDetails(finalDetails) ? [...segments, finalDetails] : [...segments] + finalMessages.push(...archiveDoneTodos()) + if (finalText) { finalMessages.push({ role: 'assistant', text: finalText }) } diff --git a/ui-tui/src/app/turnStore.ts b/ui-tui/src/app/turnStore.ts index f6d40bd3..9700f953 100644 --- a/ui-tui/src/app/turnStore.ts +++ b/ui-tui/src/app/turnStore.ts @@ -1,6 +1,7 @@ import { atom } from 'nanostores' import { useSyncExternalStore } from 'react' +import { appendToolShelfMessage, isTodoDone } from '../lib/liveProgress.js' import type { ActiveTool, ActivityItem, Msg, SubagentProgress, TodoItem } from '../types.js' const buildTurnState = (): TurnState => ({ @@ -14,6 +15,7 @@ const buildTurnState = (): TurnState => ({ streamSegments: [], streaming: '', subagents: [], + todoCollapsed: false, todos: [], toolTokens: 0, tools: [], @@ -36,6 +38,25 @@ export const useTurnSelector = (selector: (state: TurnState) => T): T => export const patchTurnState = (next: Partial | ((state: TurnState) => TurnState)) => $turnState.set(typeof next === 'function' ? 
next($turnState.get()) : { ...$turnState.get(), ...next }) +export const toggleTodoCollapsed = () => patchTurnState(state => ({ ...state, todoCollapsed: !state.todoCollapsed })) + +export const archiveDoneTodos = () => { + const state = $turnState.get() + + if (!isTodoDone(state.todos)) { + return [] + } + + const msg: Msg = { kind: 'trail', role: 'system', text: '', todos: state.todos } + + patchTurnState({ todoCollapsed: false, todos: [] }) + + return [msg] +} + +export const appendTurnSegment = (msg: Msg) => + patchTurnState(state => ({ ...state, streamSegments: appendToolShelfMessage(state.streamSegments, msg) })) + export const resetTurnState = () => $turnState.set(buildTurnState()) export interface TurnState { @@ -49,6 +70,7 @@ export interface TurnState { streamSegments: Msg[] streaming: string subagents: SubagentProgress[] + todoCollapsed: boolean todos: TodoItem[] toolTokens: number tools: ActiveTool[] diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 5c63c0e2..2608a9da 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -28,10 +28,6 @@ const TranscriptPane = memo(function TranscriptPane({ return ( <> - - - - {transcript.virtualHistory.topSpacer > 0 ? 
: null} @@ -73,6 +69,10 @@ const TranscriptPane = memo(function TranscriptPane({ + + + + diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index e827dd5f..7465a088 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -10,6 +10,7 @@ import type { Theme } from '../theme.js' import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' import { Md } from './markdown.js' +import { TodoPanel } from './todoPanel.js' import { ToolTrail } from './thinking.js' export const MessageLine = memo(function MessageLine({ @@ -35,6 +36,10 @@ export const MessageLine = memo(function MessageLine({ const activityMode = sectionMode('activity', detailsMode, sections, detailsModeCommandOverride) const thinking = msg.thinking?.trim() ?? '' + if (msg.kind === 'trail' && msg.todos?.length) { + return + } + if (msg.kind === 'trail' && (msg.tools?.length || tools.length || thinking)) { return thinkingMode !== 'hidden' || toolsMode !== 'hidden' || activityMode !== 'hidden' ? 
( diff --git a/ui-tui/src/components/streamingAssistant.tsx b/ui-tui/src/components/streamingAssistant.tsx index b0279986..8b5f2611 100644 --- a/ui-tui/src/components/streamingAssistant.tsx +++ b/ui-tui/src/components/streamingAssistant.tsx @@ -2,7 +2,7 @@ import { useStore } from '@nanostores/react' import { memo } from 'react' import type { AppLayoutProgressProps } from '../app/interfaces.js' -import { useTurnSelector } from '../app/turnStore.js' +import { toggleTodoCollapsed, useTurnSelector } from '../app/turnStore.js' import { $uiState } from '../app/uiStore.js' import type { DetailsMode, Msg, SectionVisibility } from '../types.js' @@ -105,8 +105,9 @@ export const StreamingAssistant = memo(function StreamingAssistant({ export const LiveTodoPanel = memo(function LiveTodoPanel() { const ui = useStore($uiState) const todos = useTurnSelector(state => state.todos) + const collapsed = useTurnSelector(state => state.todoCollapsed) - return + return }) interface StreamingAssistantProps { diff --git a/ui-tui/src/components/todoPanel.tsx b/ui-tui/src/components/todoPanel.tsx index cb8ccd80..964512d8 100644 --- a/ui-tui/src/components/todoPanel.tsx +++ b/ui-tui/src/components/todoPanel.tsx @@ -11,35 +11,52 @@ const rowColor = (t: Theme, status: TodoItem['status']) => { return tone === 'active' ? t.color.cornsilk : tone === 'body' ? t.color.statusFg : t.color.dim } -export const TodoPanel = memo(function TodoPanel({ t, todos }: { t: Theme; todos: TodoItem[] }) { +export const TodoPanel = memo(function TodoPanel({ + collapsed = false, + onToggle, + t, + todos +}: { + collapsed?: boolean + onToggle?: () => void + t: Theme + todos: TodoItem[] +}) { if (!todos.length) { return null } + const done = todos.filter(todo => todo.status === 'completed').length + return ( - - - - Todo - {' '} - - ({todos.filter(todo => todo.status === 'completed').length}/{todos.length}) + + + {collapsed ? 
'▸ ' : '▾ '} + + Todo + {' '} + + ({done}/{todos.length}) + - - - {todos.map(todo => { - const tone = todoTone(todo.status) - const color = rowColor(t, todo.status) - - return ( - - {todoGlyph(todo.status)} - {todo.content} - - ) - })} + + {!collapsed && ( + + {todos.map(todo => { + const tone = todoTone(todo.status) + const color = rowColor(t, todo.status) + + return ( + + {todoGlyph(todo.status)} + {todo.content} + + ) + })} + + )} ) }) diff --git a/ui-tui/src/lib/liveLayout.ts b/ui-tui/src/lib/liveLayout.ts index a990b06d..13856f5c 100644 --- a/ui-tui/src/lib/liveLayout.ts +++ b/ui-tui/src/lib/liveLayout.ts @@ -1 +1 @@ -export const liveTailOrder = () => ['todo', 'scroll-history', 'assistant'] as const +export const liveTailOrder = () => ['scroll-history', 'assistant', 'live-todo'] as const diff --git a/ui-tui/src/lib/liveProgress.test.ts b/ui-tui/src/lib/liveProgress.test.ts new file mode 100644 index 00000000..d10e1bb9 --- /dev/null +++ b/ui-tui/src/lib/liveProgress.test.ts @@ -0,0 +1,48 @@ +import { describe, expect, it } from 'vitest' + +import { appendToolShelfMessage, isTodoDone } from './liveProgress.js' + +describe('isTodoDone', () => { + it('only treats non-empty all-completed/cancelled lists as done', () => { + expect(isTodoDone([])).toBe(false) + expect(isTodoDone([{ content: 'x', id: 'x', status: 'completed' }])).toBe(true) + expect(isTodoDone([{ content: 'x', id: 'x', status: 'in_progress' }])).toBe(false) + expect( + isTodoDone([ + { content: 'x', id: 'x', status: 'completed' }, + { content: 'y', id: 'y', status: 'cancelled' } + ]) + ).toBe(true) + }) +}) + +describe('appendToolShelfMessage', () => { + it('merges adjacent tool shelves into one contextual shelf', () => { + const merged = appendToolShelfMessage([{ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }], { + kind: 'trail', + role: 'system', + text: '', + tools: ['two ✓'] + }) + + expect(merged).toEqual([{ kind: 'trail', role: 'system', text: '', tools: ['one ✓', 'two ✓'] }]) + }) 
+ + it('adds tools to the nearest contextual thinking shelf', () => { + const merged = appendToolShelfMessage( + [{ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓'] }], + { kind: 'trail', role: 'system', text: '', tools: ['two ✓'] } + ) + + expect(merged).toEqual([{ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓', 'two ✓'] }]) + }) + + it('starts a new shelf across assistant text boundaries', () => { + const merged = appendToolShelfMessage( + [{ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }, { role: 'assistant', text: 'done' }], + { kind: 'trail', role: 'system', text: '', tools: ['two ✓'] } + ) + + expect(merged).toHaveLength(3) + }) +}) diff --git a/ui-tui/src/lib/liveProgress.ts b/ui-tui/src/lib/liveProgress.ts new file mode 100644 index 00000000..62f74163 --- /dev/null +++ b/ui-tui/src/lib/liveProgress.ts @@ -0,0 +1,34 @@ +import type { Msg, TodoItem } from '../types.js' + +export const isTodoDone = (todos: readonly TodoItem[]) => + todos.length > 0 && todos.every(todo => todo.status === 'completed' || todo.status === 'cancelled') + +export const isToolShelfMessage = (msg: Msg | undefined) => + Boolean(msg?.kind === 'trail' && !msg.text && !msg.thinking?.trim() && msg.tools?.length) + +const canHoldToolShelf = (msg: Msg | undefined) => + Boolean(msg?.kind === 'trail' && !msg.text && (msg.thinking?.trim() || msg.tools?.length)) + +export const appendToolShelfMessage = (prev: readonly Msg[], msg: Msg): Msg[] => { + if (!isToolShelfMessage(msg)) { + return [...prev, msg] + } + + for (let index = prev.length - 1; index >= 0; index--) { + const candidate = prev[index] + + if (canHoldToolShelf(candidate)) { + const next = [...prev] + + next[index] = { ...candidate!, tools: [...(candidate!.tools ?? []), ...(msg.tools ?? 
[])] } + + return next + } + + if (candidate?.kind !== 'trail' || candidate.text) { + break + } + } + + return [...prev, msg] +} diff --git a/ui-tui/src/lib/messages.test.ts b/ui-tui/src/lib/messages.test.ts index 6194311c..422ddb1a 100644 --- a/ui-tui/src/lib/messages.test.ts +++ b/ui-tui/src/lib/messages.test.ts @@ -4,20 +4,26 @@ import { appendTranscriptMessage } from './messages.js' describe('appendTranscriptMessage', () => { it('merges adjacent tool-only shelves into one transcript row', () => { - const out = appendTranscriptMessage( - [{ kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓'] }], - { kind: 'trail', role: 'system', text: '', tools: ['Terminal("two") ✓'] } - ) + const out = appendTranscriptMessage([{ kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓'] }], { + kind: 'trail', + role: 'system', + text: '', + tools: ['Terminal("two") ✓'] + }) - expect(out).toEqual([{ kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓', 'Terminal("two") ✓'] }]) + expect(out).toEqual([ + { kind: 'trail', role: 'system', text: '', tools: ['Terminal("one") ✓', 'Terminal("two") ✓'] } + ]) }) - it('does not merge tool shelves across thinking text', () => { + it('merges tool shelves into the nearest thinking shelf', () => { const out = appendTranscriptMessage( [{ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['Terminal("one") ✓'] }], { kind: 'trail', role: 'system', text: '', tools: ['Terminal("two") ✓'] } ) - expect(out).toHaveLength(2) + expect(out).toEqual([ + { kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['Terminal("one") ✓', 'Terminal("two") ✓'] } + ]) }) }) diff --git a/ui-tui/src/lib/messages.ts b/ui-tui/src/lib/messages.ts index 60fc4b76..b8e89421 100644 --- a/ui-tui/src/lib/messages.ts +++ b/ui-tui/src/lib/messages.ts @@ -1,17 +1,8 @@ import type { Msg, Role } from '../types.js' -const isToolShelf = (msg: Msg | undefined) => - Boolean(msg?.kind === 'trail' && !msg.text && 
!msg.thinking?.trim() && msg.tools?.length) +import { appendToolShelfMessage } from './liveProgress.js' -export const appendTranscriptMessage = (prev: Msg[], msg: Msg): Msg[] => { - if (isToolShelf(msg) && isToolShelf(prev.at(-1))) { - const last = prev.at(-1)! - - return [...prev.slice(0, -1), { ...last, tools: [...(last.tools ?? []), ...(msg.tools ?? [])] }] - } - - return [...prev, msg] -} +export const appendTranscriptMessage = (prev: Msg[], msg: Msg): Msg[] => appendToolShelfMessage(prev, msg) export const upsert = (prev: Msg[], role: Role, text: string): Msg[] => prev.at(-1)?.role === role ? [...prev.slice(0, -1), { role, text }] : [...prev, { role, text }] diff --git a/ui-tui/src/types.ts b/ui-tui/src/types.ts index 89c83856..ac61868b 100644 --- a/ui-tui/src/types.ts +++ b/ui-tui/src/types.ts @@ -116,6 +116,7 @@ export interface Msg { thinkingTokens?: number toolTokens?: number tools?: string[] + todos?: TodoItem[] } export type Role = 'assistant' | 'system' | 'tool' | 'user' From a5319fb7afb2decd3f1f510fc4cac3601d1d3b42 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:56:08 -0500 Subject: [PATCH 46/87] test(tui): cover live todo completion flow --- .../src/__tests__/createGatewayEventHandler.test.ts | 12 ++++++++++++ ui-tui/src/lib/liveLayout.test.ts | 4 ++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index ad4a8f8e..7640c2bf 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -80,6 +80,18 @@ describe('createGatewayEventHandler', () => { expect(getTurnState().todos).toEqual(todos) }) + it('archives completed todos into transcript flow at end of turn', () => { + const appended: Msg[] = [] + const todos = [{ content: 'Serve tiny latte', id: 'serve', status: 'completed' }] + const onEvent = 
createGatewayEventHandler(buildCtx(appended)) + + onEvent({ payload: { name: 'todo', todos, tool_id: 'todo-1' }, type: 'tool.start' } as any) + onEvent({ payload: { text: 'done' }, type: 'message.complete' } as any) + + expect(getTurnState().todos).toEqual([]) + expect(appended).toContainEqual({ kind: 'trail', role: 'system', text: '', todos }) + }) + it('keeps the current todo list visible when the next message starts', () => { const appended: Msg[] = [] const todos = [{ content: 'Boil water', id: 'boil', status: 'in_progress' }] diff --git a/ui-tui/src/lib/liveLayout.test.ts b/ui-tui/src/lib/liveLayout.test.ts index 24426efe..9faa1dae 100644 --- a/ui-tui/src/lib/liveLayout.test.ts +++ b/ui-tui/src/lib/liveLayout.test.ts @@ -3,7 +3,7 @@ import { describe, expect, it } from 'vitest' import { liveTailOrder } from './liveLayout.js' describe('liveTailOrder', () => { - it('keeps todo before transcript and assistant live output', () => { - expect(liveTailOrder()).toEqual(['todo', 'scroll-history', 'assistant']) + it('anchors live todo after scroll history and assistant output', () => { + expect(liveTailOrder()).toEqual(['scroll-history', 'assistant', 'live-todo']) }) }) From 4d3e3a738dabddf1dda336efd796f4710640e153 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 15:56:47 -0500 Subject: [PATCH 47/87] chore(tui): sort imports --- ui-tui/src/app/turnController.ts | 2 +- ui-tui/src/components/messageLine.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ui-tui/src/app/turnController.ts b/ui-tui/src/app/turnController.ts index 9bc87ea8..4c8a728a 100644 --- a/ui-tui/src/app/turnController.ts +++ b/ui-tui/src/app/turnController.ts @@ -6,8 +6,8 @@ import { STREAM_TYPING_BATCH_MS } from '../config/timing.js' import type { SessionInterruptResponse, SubagentEventPayload } from '../gatewayTypes.js' -import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' import { appendToolShelfMessage, isToolShelfMessage } from 
'../lib/liveProgress.js' +import { hasReasoningTag, splitReasoning } from '../lib/reasoning.js' import { boundedLiveRenderText, buildToolTrailLine, diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index 7465a088..43e619f4 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -10,8 +10,8 @@ import type { Theme } from '../theme.js' import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' import { Md } from './markdown.js' -import { TodoPanel } from './todoPanel.js' import { ToolTrail } from './thinking.js' +import { TodoPanel } from './todoPanel.js' export const MessageLine = memo(function MessageLine({ cols, From 4943ea2a7c4220251b52099fae7a9b01813b272a Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:00:38 -0500 Subject: [PATCH 48/87] fix(tui): merge tools into contextual shelves --- ui-tui/src/lib/liveProgress.test.ts | 19 ++++++++++++++++++- ui-tui/src/lib/liveProgress.ts | 9 +++++++-- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/ui-tui/src/lib/liveProgress.test.ts b/ui-tui/src/lib/liveProgress.test.ts index d10e1bb9..141fb7ac 100644 --- a/ui-tui/src/lib/liveProgress.test.ts +++ b/ui-tui/src/lib/liveProgress.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it } from 'vitest' -import { appendToolShelfMessage, isTodoDone } from './liveProgress.js' +import { appendToolShelfMessage, canHoldToolShelf, isTodoDone, mergeToolShelfInto } from './liveProgress.js' describe('isTodoDone', () => { it('only treats non-empty all-completed/cancelled lists as done', () => { @@ -16,6 +16,23 @@ describe('isTodoDone', () => { }) }) +describe('tool shelf helpers', () => { + it('recognizes contextual thinking shelves as holders', () => { + expect(canHoldToolShelf({ kind: 'trail', role: 'system', text: '', thinking: 'plan' })).toBe(true) + expect(canHoldToolShelf({ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] 
})).toBe(true) + expect(canHoldToolShelf({ role: 'assistant', text: 'done' })).toBe(false) + }) + + it('merges source rows into an existing shelf', () => { + expect( + mergeToolShelfInto( + { kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓'] }, + { kind: 'trail', role: 'system', text: '', tools: ['two ✓'] } + ) + ).toEqual({ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓', 'two ✓'] }) + }) +}) + describe('appendToolShelfMessage', () => { it('merges adjacent tool shelves into one contextual shelf', () => { const merged = appendToolShelfMessage([{ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }], { diff --git a/ui-tui/src/lib/liveProgress.ts b/ui-tui/src/lib/liveProgress.ts index 62f74163..9666e431 100644 --- a/ui-tui/src/lib/liveProgress.ts +++ b/ui-tui/src/lib/liveProgress.ts @@ -6,9 +6,14 @@ export const isTodoDone = (todos: readonly TodoItem[]) => export const isToolShelfMessage = (msg: Msg | undefined) => Boolean(msg?.kind === 'trail' && !msg.text && !msg.thinking?.trim() && msg.tools?.length) -const canHoldToolShelf = (msg: Msg | undefined) => +export const canHoldToolShelf = (msg: Msg | undefined) => Boolean(msg?.kind === 'trail' && !msg.text && (msg.thinking?.trim() || msg.tools?.length)) +export const mergeToolShelfInto = (target: Msg, source: Msg): Msg => ({ + ...target, + tools: [...(target.tools ?? []), ...(source.tools ?? [])] +}) + export const appendToolShelfMessage = (prev: readonly Msg[], msg: Msg): Msg[] => { if (!isToolShelfMessage(msg)) { return [...prev, msg] @@ -20,7 +25,7 @@ export const appendToolShelfMessage = (prev: readonly Msg[], msg: Msg): Msg[] => if (canHoldToolShelf(candidate)) { const next = [...prev] - next[index] = { ...candidate!, tools: [...(candidate!.tools ?? []), ...(msg.tools ?? 
[])] } + next[index] = mergeToolShelfInto(candidate!, msg) return next } From 319c1c1691847d2df20e79cdd708b56b6f92a54c Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:09:28 -0500 Subject: [PATCH 49/87] fix(tui): inline todo in transcript, group across thinking --- ui-tui/src/components/agentsOverlay.tsx | 16 +++---- ui-tui/src/components/appChrome.tsx | 6 +-- ui-tui/src/components/appLayout.tsx | 6 +-- ui-tui/src/components/streamingAssistant.tsx | 16 ++----- ui-tui/src/lib/liveProgress.test.ts | 48 ++++++++++++++++++++ ui-tui/src/lib/liveProgress.ts | 42 ++++++++++++++++- 6 files changed, 104 insertions(+), 30 deletions(-) diff --git a/ui-tui/src/components/agentsOverlay.tsx b/ui-tui/src/components/agentsOverlay.tsx index a8ad9175..6d3917bf 100644 --- a/ui-tui/src/components/agentsOverlay.tsx +++ b/ui-tui/src/components/agentsOverlay.tsx @@ -10,7 +10,7 @@ import { } from '../app/delegationStore.js' import { patchOverlayState } from '../app/overlayStore.js' import { $spawnDiff, $spawnHistory, clearDiffPair, type SpawnSnapshot } from '../app/spawnHistoryStore.js' -import { $turnState } from '../app/turnStore.js' +import { useTurnSelector } from '../app/turnStore.js' import type { GatewayClient } from '../gatewayClient.js' import type { DelegationPauseResponse, DelegationStatusResponse, SubagentInterruptResponse } from '../gatewayTypes.js' import { asRpcResult } from '../lib/rpc.js' @@ -683,7 +683,7 @@ function DiffView({ // ── Main overlay ───────────────────────────────────────────────────── export function AgentsOverlay({ gw, initialHistoryIndex = 0, onClose, t }: AgentsOverlayProps) { - const turn = useStore($turnState) + const liveSubagents = useTurnSelector(state => state.subagents) const delegation = useStore($delegationState) const history = useStore($spawnHistory) const diffPair = useStore($spawnDiff) @@ -705,17 +705,17 @@ export function AgentsOverlay({ gw, initialHistoryIndex = 0, onClose, t }: Agent const [mode, setMode] = 
useState<'detail' | 'list'>('list') const detailScrollRef = useRef(null) - const prevLiveCountRef = useRef(turn.subagents.length) + const prevLiveCountRef = useRef(liveSubagents.length) // ── Derived state ────────────────────────────────────────────────── const activeSnapshot = historyIndex > 0 ? history[historyIndex - 1] : null // Instant fallback to history[0] the moment the live list clears — avoids // a one-frame "no subagents" flash while the auto-follow effect fires. - const justFinishedSnapshot = historyIndex === 0 && turn.subagents.length === 0 ? (history[0] ?? null) : null + const justFinishedSnapshot = historyIndex === 0 && liveSubagents.length === 0 ? (history[0] ?? null) : null const effectiveSnapshot = activeSnapshot ?? justFinishedSnapshot const replayMode = effectiveSnapshot != null - const subagents = replayMode ? effectiveSnapshot.subagents : turn.subagents + const subagents = replayMode ? effectiveSnapshot.subagents : liveSubagents const tree = useMemo(() => buildSubagentTree(subagents), [subagents]) const totals = useMemo(() => treeTotals(tree), [tree]) @@ -753,14 +753,14 @@ export function AgentsOverlay({ gw, initialHistoryIndex = 0, onClose, t }: Agent // dropped into an empty live view. Fires only when transitioning from // "had live subagents" → "live empty" while in live mode. const prev = prevLiveCountRef.current - prevLiveCountRef.current = turn.subagents.length + prevLiveCountRef.current = liveSubagents.length - if (historyIndex === 0 && prev > 0 && turn.subagents.length === 0 && history.length > 0) { + if (historyIndex === 0 && prev > 0 && liveSubagents.length === 0 && history.length > 0) { setHistoryIndex(1) setCursor(0) setFlash('turn finished · inspect freely · q to close') } - }, [history.length, historyIndex, turn.subagents.length]) + }, [history.length, historyIndex, liveSubagents.length]) useEffect(() => { // Reset detail scroll on navigation so the top of the new node shows. 
diff --git a/ui-tui/src/components/appChrome.tsx b/ui-tui/src/components/appChrome.tsx index 17ba966d..42015e11 100644 --- a/ui-tui/src/components/appChrome.tsx +++ b/ui-tui/src/components/appChrome.tsx @@ -3,7 +3,7 @@ import { useStore } from '@nanostores/react' import { type ReactNode, type RefObject, useEffect, useMemo, useState } from 'react' import { $delegationState } from '../app/delegationStore.js' -import { $turnState } from '../app/turnStore.js' +import { useTurnSelector } from '../app/turnStore.js' import { FACES } from '../content/faces.js' import { VERBS } from '../content/verbs.js' import { fmtDuration } from '../domain/messages.js' @@ -69,9 +69,9 @@ function SpawnHud({ t }: { t: Theme }) { // Tight HUD that only appears when the session is actually fanning out. // Colour escalates to warn/error as depth or concurrency approaches the cap. const delegation = useStore($delegationState) - const turn = useStore($turnState) + const subagents = useTurnSelector(state => state.subagents) - const tree = useMemo(() => buildSubagentTree(turn.subagents), [turn.subagents]) + const tree = useMemo(() => buildSubagentTree(subagents), [subagents]) const totals = useMemo(() => treeTotals(tree), [tree]) if (!totals.descendantCount && !delegation.paused) { diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 2608a9da..9e716583 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -66,13 +66,11 @@ const TranscriptPane = memo(function TranscriptPane({ progress={progress} sections={ui.sections} /> + + - - - - diff --git a/ui-tui/src/components/streamingAssistant.tsx b/ui-tui/src/components/streamingAssistant.tsx index 8b5f2611..d691138b 100644 --- a/ui-tui/src/components/streamingAssistant.tsx +++ b/ui-tui/src/components/streamingAssistant.tsx @@ -4,24 +4,14 @@ import { memo } from 'react' import type { AppLayoutProgressProps } from '../app/interfaces.js' import { toggleTodoCollapsed, 
useTurnSelector } from '../app/turnStore.js' import { $uiState } from '../app/uiStore.js' +import { appendToolShelfMessage } from '../lib/liveProgress.js' import type { DetailsMode, Msg, SectionVisibility } from '../types.js' import { MessageLine } from './messageLine.js' import { TodoPanel } from './todoPanel.js' -const isToolOnly = (msg: Msg | undefined) => - Boolean(msg && msg.kind === 'trail' && !msg.thinking?.trim() && !msg.text && msg.tools?.length) - -const groupedSegments = (segments: Msg[]) => - segments.reduce((acc, msg) => { - if (isToolOnly(msg) && isToolOnly(acc.at(-1))) { - const prev = acc.at(-1)! - - return [...acc.slice(0, -1), { ...prev, tools: [...(prev.tools ?? []), ...(msg.tools ?? [])] }] - } - - return [...acc, msg] - }, []) +const groupedSegments = (segments: Msg[]): Msg[] => + segments.reduce((acc, msg) => appendToolShelfMessage(acc, msg), []) export const StreamingAssistant = memo(function StreamingAssistant({ cols, diff --git a/ui-tui/src/lib/liveProgress.test.ts b/ui-tui/src/lib/liveProgress.test.ts index 141fb7ac..eec209ba 100644 --- a/ui-tui/src/lib/liveProgress.test.ts +++ b/ui-tui/src/lib/liveProgress.test.ts @@ -1,5 +1,7 @@ import { describe, expect, it } from 'vitest' +import type { Msg } from '../types.js' + import { appendToolShelfMessage, canHoldToolShelf, isTodoDone, mergeToolShelfInto } from './liveProgress.js' describe('isTodoDone', () => { @@ -54,6 +56,52 @@ describe('appendToolShelfMessage', () => { expect(merged).toEqual([{ kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓', 'two ✓'] }]) }) + it('merges through intervening thinking-only rows back into the nearest holder', () => { + const prev: Msg[] = [ + { kind: 'trail', role: 'system', text: '', thinking: 'plan', tools: ['one ✓'] }, + { kind: 'trail', role: 'system', text: '', thinking: 'more plan' } + ] + + const merged = appendToolShelfMessage(prev, { + kind: 'trail', + role: 'system', + text: '', + tools: ['two ✓'] + }) + + 
expect(merged).toHaveLength(2) + expect(merged[0]).toEqual({ + kind: 'trail', + role: 'system', + text: '', + thinking: 'plan', + tools: ['one ✓', 'two ✓'] + }) + expect(merged[1]).toEqual({ kind: 'trail', role: 'system', text: '', thinking: 'more plan' }) + }) + + it('collapses a chronological thinking/tool/thinking/tool stream into one shelf', () => { + const events: Msg[] = [ + { kind: 'trail', role: 'system', text: '', thinking: 'plan' }, + { kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }, + { kind: 'trail', role: 'system', text: '', thinking: 'more plan' }, + { kind: 'trail', role: 'system', text: '', tools: ['two ✓'] }, + { kind: 'trail', role: 'system', text: '', tools: ['three ✓'] } + ] + + const reduced = events.reduce((acc, msg) => appendToolShelfMessage(acc, msg), []) + + expect(reduced).toHaveLength(2) + expect(reduced[0]).toEqual({ + kind: 'trail', + role: 'system', + text: '', + thinking: 'plan', + tools: ['one ✓', 'two ✓', 'three ✓'] + }) + expect(reduced[1]).toEqual({ kind: 'trail', role: 'system', text: '', thinking: 'more plan' }) + }) + it('starts a new shelf across assistant text boundaries', () => { const merged = appendToolShelfMessage( [{ kind: 'trail', role: 'system', text: '', tools: ['one ✓'] }, { role: 'assistant', text: 'done' }], diff --git a/ui-tui/src/lib/liveProgress.ts b/ui-tui/src/lib/liveProgress.ts index 9666e431..2177d213 100644 --- a/ui-tui/src/lib/liveProgress.ts +++ b/ui-tui/src/lib/liveProgress.ts @@ -14,15 +14,41 @@ export const mergeToolShelfInto = (target: Msg, source: Msg): Msg => ({ tools: [...(target.tools ?? []), ...(source.tools ?? [])] }) +const isBarrierMessage = (msg: Msg | undefined) => { + if (!msg) { + return true + } + + // Assistant text, user input, intro/panel rows all terminate the shelf. 
+ if (msg.kind === 'intro' || msg.kind === 'panel' || msg.kind === 'diff') { + return true + } + + if (msg.role && msg.role !== 'system') { + return true + } + + if (msg.text) { + return true + } + + return false +} + +const isToolCarryingTrail = (msg: Msg | undefined) => + Boolean(msg?.kind === 'trail' && !msg.text && msg.tools?.length) + export const appendToolShelfMessage = (prev: readonly Msg[], msg: Msg): Msg[] => { if (!isToolShelfMessage(msg)) { return [...prev, msg] } + let fallbackHolder: number | null = null + for (let index = prev.length - 1; index >= 0; index--) { const candidate = prev[index] - if (canHoldToolShelf(candidate)) { + if (isToolCarryingTrail(candidate)) { const next = [...prev] next[index] = mergeToolShelfInto(candidate!, msg) @@ -30,10 +56,22 @@ export const appendToolShelfMessage = (prev: readonly Msg[], msg: Msg): Msg[] => return next } - if (candidate?.kind !== 'trail' || candidate.text) { + if (fallbackHolder === null && canHoldToolShelf(candidate)) { + fallbackHolder = index + } + + if (isBarrierMessage(candidate)) { break } } + if (fallbackHolder !== null) { + const next = [...prev] + + next[fallbackHolder] = mergeToolShelfInto(prev[fallbackHolder]!, msg) + + return next + } + return [...prev, msg] } From c78b528125c597ae41d624b4fc87125fc0d29d9c Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:14:58 -0500 Subject: [PATCH 50/87] feat(tui): archive todos at turn end with incomplete hint --- ui-tui/babel.compiler.config.cjs | 32 + ui-tui/package-lock.json | 601 ++++++++++++++++++ ui-tui/package.json | 5 + .../createGatewayEventHandler.test.ts | 10 +- ui-tui/src/__tests__/turnStore.test.ts | 23 +- ui-tui/src/app/createGatewayEventHandler.ts | 4 +- ui-tui/src/app/turnStore.ts | 14 +- ui-tui/src/components/messageLine.tsx | 2 +- ui-tui/src/components/todoPanel.tsx | 10 + ui-tui/src/hooks/useVirtualHistory.ts | 313 +++++++-- ui-tui/src/lib/liveProgress.ts | 3 + ui-tui/src/types.ts | 1 + 12 files changed, 948 
insertions(+), 70 deletions(-) create mode 100644 ui-tui/babel.compiler.config.cjs diff --git a/ui-tui/babel.compiler.config.cjs b/ui-tui/babel.compiler.config.cjs new file mode 100644 index 00000000..b81ff954 --- /dev/null +++ b/ui-tui/babel.compiler.config.cjs @@ -0,0 +1,32 @@ +// React Compiler runs as a post-pass over tsc's `dist/` output. +// +// tsc emits JSX as _jsx() calls (jsx: "react-jsx"). babel-plugin-react-compiler +// accepts that shape and auto-memoizes every component it recognizes via the +// default `infer` compilation mode (PascalCase components + use-prefixed +// hooks). The `sources` filter keeps it from walking node_modules files that +// end up in source maps. +// +// target=19 matches our react ^19.2.4 dependency. +module.exports = { + assumptions: { + setPublicClassFields: true + }, + plugins: [ + [ + 'babel-plugin-react-compiler', + { + target: '19', + sources: (filename) => { + if (!filename) return false + if (filename.includes('node_modules')) return false + return true + } + } + ] + ], + // We feed already-compiled JS into babel; don't re-parse as TS/JSX. + // @babel/preset-env etc. would over-transform — the compiler is our only + // transform here. 
+ babelrc: false, + configFile: false +} diff --git a/ui-tui/package-lock.json b/ui-tui/package-lock.json index 46c83d19..017e9913 100644 --- a/ui-tui/package-lock.json +++ b/ui-tui/package-lock.json @@ -16,14 +16,19 @@ "unicode-animations": "^1.0.3" }, "devDependencies": { + "@babel/cli": "^7.28.6", + "@babel/core": "^7.29.0", + "@babel/plugin-syntax-jsx": "^7.28.6", "@eslint/js": "^9", "@types/node": "^25.5.0", "@types/react": "^19.2.14", "@typescript-eslint/eslint-plugin": "^8", "@typescript-eslint/parser": "^8", + "babel-plugin-react-compiler": "^1.0.0", "eslint": "^9", "eslint-plugin-perfectionist": "^5", "eslint-plugin-react": "^7", + "eslint-plugin-react-compiler": "^19.1.0-rc.2", "eslint-plugin-react-hooks": "^7", "eslint-plugin-unused-imports": "^4", "globals": "^16", @@ -58,6 +63,36 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/@babel/cli": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/cli/-/cli-7.28.6.tgz", + "integrity": "sha512-6EUNcuBbNkj08Oj4gAZ+BUU8yLCgKzgVX4gaTh09Ya2C8ICM4P+G30g4m3akRxSYAp3A/gnWchrNst7px4/nUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.28", + "commander": "^6.2.0", + "convert-source-map": "^2.0.0", + "fs-readdir-recursive": "^1.1.0", + "glob": "^7.2.0", + "make-dir": "^2.1.0", + "slash": "^2.0.0" + }, + "bin": { + "babel": "bin/babel.js", + "babel-external-helpers": "bin/babel-external-helpers.js" + }, + "engines": { + "node": ">=6.9.0" + }, + "optionalDependencies": { + "@nicolo-ribaudo/chokidar-2": "2.1.8-no-fsevents.3", + "chokidar": "^3.6.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/code-frame": { "version": "7.29.0", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", @@ -141,6 +176,19 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": 
"https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-compilation-targets": { "version": "7.28.6", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", @@ -168,6 +216,38 @@ "semver": "bin/semver.js" } }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.6.tgz", + "integrity": "sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.28.5", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.28.6", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.6", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/helper-globals": { "version": "7.28.0", "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", @@ -178,6 +258,20 @@ "node": ">=6.9.0" } }, + 
"node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.28.5.tgz", + "integrity": "sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.5", + "@babel/types": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-module-imports": { "version": "7.28.6", "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", @@ -210,6 +304,61 @@ "@babel/core": "^7.0.0" } }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.27.1.tgz", + "integrity": "sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.28.6.tgz", + "integrity": "sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.28.5", + 
"@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.27.1.tgz", + "integrity": "sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-string-parser": { "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", @@ -270,6 +419,40 @@ "node": ">=6.0.0" } }, + "node_modules/@babel/plugin-proposal-private-methods": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz", + "integrity": "sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-private-methods instead.", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.28.6.tgz", + "integrity": "sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/template": { "version": "7.28.6", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", @@ -1156,6 +1339,14 @@ "@emnapi/runtime": "^1.7.1" } }, + "node_modules/@nicolo-ribaudo/chokidar-2": { + "version": "2.1.8-no-fsevents.3", + "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz", + "integrity": "sha512-s88O1aVtXftvp5bCPB7WnmXc5IwOZZ7YPuwNPt+GtOOXpPvad1LfbmjYv+qII7zP6RU2QGnqve27dnLycEnyEQ==", + "dev": true, + "license": "MIT", + "optional": true + }, "node_modules/@oxc-project/types": { "version": "0.124.0", "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.124.0.tgz", @@ -1952,6 +2143,35 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + 
}, + "node_modules/anymatch/node_modules/picomatch": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -2145,6 +2365,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/babel-plugin-react-compiler": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/babel-plugin-react-compiler/-/babel-plugin-react-compiler-1.0.0.tgz", + "integrity": "sha512-Ixm8tFfoKKIPYdCCKYTsqv+Fd4IJ0DQqMyEimo+pxUOMUR9cVPlwTrFt9Avu+3cb6Zp3mAzl+t1MrG2fxxKsxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.0" + } + }, "node_modules/balanced-match": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", @@ -2177,6 +2407,20 @@ "require-from-string": "^2.0.2" } }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/brace-expansion": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz", @@ -2190,6 +2434,20 @@ "node": "18 || 20 || >=22" } }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/browserslist": { "version": "4.28.2", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.2.tgz", @@ -2332,6 +2590,46 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "optional": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/cli-boxes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz", @@ -2407,6 +2705,16 @@ "dev": true, "license": "MIT" }, + "node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "license": "MIT", + "engines": { 
+ "node": ">= 6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -2999,6 +3307,50 @@ "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, + "node_modules/eslint-plugin-react-compiler": { + "version": "19.1.0-rc.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-compiler/-/eslint-plugin-react-compiler-19.1.0-rc.2.tgz", + "integrity": "sha512-oKalwDGcD+RX9mf3NEO4zOoUMeLvjSvcbbEOpquzmzqEEM2MQdp7/FY/Hx9NzmUwFzH1W9SKTz5fihfMldpEYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.24.4", + "@babel/parser": "^7.24.4", + "@babel/plugin-proposal-private-methods": "^7.18.6", + "hermes-parser": "^0.25.1", + "zod": "^3.22.4", + "zod-validation-error": "^3.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.0.0 || >= 18.0.0" + }, + "peerDependencies": { + "eslint": ">=7" + } + }, + "node_modules/eslint-plugin-react-compiler/node_modules/zod": { + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/eslint-plugin-react-compiler/node_modules/zod-validation-error": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-3.5.4.tgz", + "integrity": "sha512-+hEiRIiPobgyuFlEojnqjJnhFvg4r/i3cqgcm67eehZf/WBaK3g6cD02YU9mtdVxZjv8CzCA9n/Rhrs3yAAvAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "zod": "^3.24.4" + } + }, "node_modules/eslint-plugin-react-hooks": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-7.0.1.tgz", @@ -3309,6 +3661,20 @@ "node": ">=16.0.0" } }, + "node_modules/fill-range": { + 
"version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/find-up": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", @@ -3363,6 +3729,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/fs-readdir-recursive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz", + "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3521,6 +3901,28 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", @@ -3534,6 +3936,37 @@ "node": ">=10.13.0" } }, + "node_modules/glob/node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz", + "integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/globals": { "version": "16.5.0", "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", @@ -3736,6 +4169,25 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + 
"integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, + "license": "ISC" + }, "node_modules/ink": { "version": "6.8.0", "resolved": "https://registry.npmjs.org/ink/-/ink-6.8.0.tgz", @@ -3919,6 +4371,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/is-boolean-object": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", @@ -4115,6 +4581,17 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.12.0" + } + }, "node_modules/is-number-object": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", @@ -4745,6 +5222,30 @@ "@jridgewell/sourcemap-codec": "^1.5.5" } }, + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -4875,6 +5376,17 @@ "dev": true, "license": "MIT" }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -4994,6 +5506,16 @@ ], "license": "MIT" }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/onetime": { "version": "5.1.2", "resolved": 
"https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -5109,6 +5631,16 @@ "node": ">=8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -5153,6 +5685,16 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/possible-typed-array-names": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", @@ -5271,6 +5813,34 @@ "react": "^19.2.0" } }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/readdirp/node_modules/picomatch": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, 
"node_modules/reflect.getprototypeof": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", @@ -5652,6 +6222,16 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "license": "ISC" }, + "node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/slice-ansi": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-8.0.0.tgz", @@ -5990,6 +6570,20 @@ "node": ">=14.0.0" } }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, "node_modules/ts-api-utils": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz", @@ -6607,6 +7201,13 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, "node_modules/ws": { "version": "8.20.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.20.0.tgz", diff --git a/ui-tui/package.json b/ui-tui/package.json index 4776f083..4a16c9c3 100644 --- a/ui-tui/package.json +++ b/ui-tui/package.json @@ -24,14 +24,19 @@ "unicode-animations": "^1.0.3" }, "devDependencies": { + 
"@babel/cli": "^7.28.6", + "@babel/core": "^7.29.0", + "@babel/plugin-syntax-jsx": "^7.28.6", "@eslint/js": "^9", "@types/node": "^25.5.0", "@types/react": "^19.2.14", "@typescript-eslint/eslint-plugin": "^8", "@typescript-eslint/parser": "^8", + "babel-plugin-react-compiler": "^1.0.0", "eslint": "^9", "eslint-plugin-perfectionist": "^5", "eslint-plugin-react": "^7", + "eslint-plugin-react-compiler": "^19.1.0-rc.2", "eslint-plugin-react-hooks": "^7", "eslint-plugin-unused-imports": "^4", "globals": "^16", diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 7640c2bf..0c0537a8 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -59,7 +59,7 @@ describe('createGatewayEventHandler', () => { patchUiState({ showReasoning: true }) }) - it('keeps todo list visible after final assistant text completes', () => { + it('archives incomplete todos into transcript flow at end of turn so they scroll up', () => { const appended: Msg[] = [] const todos = [ @@ -76,8 +76,12 @@ describe('createGatewayEventHandler', () => { onEvent({ payload: { text: 'Started a todo list.' }, type: 'message.complete' } as any) - expect(appended[appended.length - 1]).toMatchObject({ role: 'assistant', text: 'Started a todo list.' 
}) - expect(getTurnState().todos).toEqual(todos) + const trail = appended.find(msg => msg.kind === 'trail' && msg.todos?.length) + const finalText = appended.find(msg => msg.role === 'assistant' && msg.text === 'Started a todo list.') + + expect(finalText).toBeDefined() + expect(trail).toMatchObject({ kind: 'trail', role: 'system', todos, todoIncomplete: true }) + expect(getTurnState().todos).toEqual([]) }) it('archives completed todos into transcript flow at end of turn', () => { diff --git a/ui-tui/src/__tests__/turnStore.test.ts b/ui-tui/src/__tests__/turnStore.test.ts index 006a1288..b1b48565 100644 --- a/ui-tui/src/__tests__/turnStore.test.ts +++ b/ui-tui/src/__tests__/turnStore.test.ts @@ -3,6 +3,7 @@ import { beforeEach, describe, expect, it } from 'vitest' import { appendTurnSegment, archiveDoneTodos, + archiveTodosAtTurnEnd, getTurnState, patchTurnState, resetTurnState, @@ -20,7 +21,7 @@ describe('turnStore live progress helpers', () => { ] }) - expect(archiveDoneTodos()).toEqual([ + expect(archiveTodosAtTurnEnd()).toEqual([ { kind: 'trail', role: 'system', @@ -34,11 +35,25 @@ describe('turnStore live progress helpers', () => { expect(getTurnState().todos).toEqual([]) }) - it('does not archive active todos', () => { - patchTurnState({ todos: [{ content: 'cook', id: 'cook', status: 'in_progress' }] }) + it('archives incomplete todos with an incomplete flag so the hint renders', () => { + patchTurnState({ + todos: [ + { content: 'cook', id: 'cook', status: 'completed' }, + { content: 'serve', id: 'serve', status: 'in_progress' }, + { content: 'eat', id: 'eat', status: 'pending' } + ] + }) + const archived = archiveTodosAtTurnEnd() + expect(archived).toHaveLength(1) + expect(archived[0]!.todoIncomplete).toBe(true) + expect(archived[0]!.todos?.map(t => t.id)).toEqual(['cook', 'serve', 'eat']) + expect(getTurnState().todos).toEqual([]) + }) + + it('returns nothing when there are no todos at turn end', () => { + expect(archiveTodosAtTurnEnd()).toEqual([]) 
expect(archiveDoneTodos()).toEqual([]) - expect(getTurnState().todos).toHaveLength(1) }) it('tracks collapsed state independently of todo content', () => { diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index d1e9d633..c314fc10 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -11,7 +11,7 @@ import { applyDelegationStatus, getDelegationState } from './delegationStore.js' import type { GatewayEventHandlerContext } from './interfaces.js' import { patchOverlayState } from './overlayStore.js' import { turnController } from './turnController.js' -import { archiveDoneTodos } from './turnStore.js' +import { archiveTodosAtTurnEnd } from './turnStore.js' import { getUiState, patchUiState } from './uiStore.js' const NO_PROVIDER_RE = /\bNo (?:LLM|inference) provider configured\b/i @@ -539,7 +539,7 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: if (!wasInterrupted) { const msgs: Msg[] = finalMessages.length ? 
finalMessages : [{ role: 'assistant', text: finalText }] msgs.forEach(appendMessage) - archiveDoneTodos().forEach(appendMessage) + archiveTodosAtTurnEnd().forEach(appendMessage) if (bellOnComplete && stdout?.isTTY) { stdout.write('\x07') diff --git a/ui-tui/src/app/turnStore.ts b/ui-tui/src/app/turnStore.ts index 9700f953..da4484ab 100644 --- a/ui-tui/src/app/turnStore.ts +++ b/ui-tui/src/app/turnStore.ts @@ -40,14 +40,22 @@ export const patchTurnState = (next: Partial | ((state: TurnState) => export const toggleTodoCollapsed = () => patchTurnState(state => ({ ...state, todoCollapsed: !state.todoCollapsed })) -export const archiveDoneTodos = () => { +export const archiveDoneTodos = () => archiveTodosAtTurnEnd() + +export const archiveTodosAtTurnEnd = () => { const state = $turnState.get() - if (!isTodoDone(state.todos)) { + if (!state.todos.length) { return [] } - const msg: Msg = { kind: 'trail', role: 'system', text: '', todos: state.todos } + const msg: Msg = { + kind: 'trail', + role: 'system', + text: '', + todos: state.todos, + ...(isTodoDone(state.todos) ? {} : { todoIncomplete: true }) + } patchTurnState({ todoCollapsed: false, todos: [] }) diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index 43e619f4..dddf0a59 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -37,7 +37,7 @@ export const MessageLine = memo(function MessageLine({ const thinking = msg.thinking?.trim() ?? 
'' if (msg.kind === 'trail' && msg.todos?.length) { - return + return } if (msg.kind === 'trail' && (msg.tools?.length || tools.length || thinking)) { diff --git a/ui-tui/src/components/todoPanel.tsx b/ui-tui/src/components/todoPanel.tsx index 964512d8..567050a3 100644 --- a/ui-tui/src/components/todoPanel.tsx +++ b/ui-tui/src/components/todoPanel.tsx @@ -1,6 +1,7 @@ import { Box, Text } from '@hermes/ink' import { memo } from 'react' +import { countPendingTodos } from '../lib/liveProgress.js' import { todoGlyph, todoTone } from '../lib/todo.js' import type { Theme } from '../theme.js' import type { TodoItem } from '../types.js' @@ -13,11 +14,13 @@ const rowColor = (t: Theme, status: TodoItem['status']) => { export const TodoPanel = memo(function TodoPanel({ collapsed = false, + incomplete = false, onToggle, t, todos }: { collapsed?: boolean + incomplete?: boolean onToggle?: () => void t: Theme todos: TodoItem[] @@ -27,6 +30,7 @@ export const TodoPanel = memo(function TodoPanel({ } const done = todos.filter(todo => todo.status === 'completed').length + const pending = countPendingTodos(todos) return ( @@ -39,6 +43,12 @@ export const TodoPanel = memo(function TodoPanel({ ({done}/{todos.length}) + {incomplete && pending > 0 && ( + + {' '} + · incomplete · {pending} still {pending === 1 ? 
'pending' : 'pending/in_progress'} + + )} diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts index 0d98ca5e..656df542 100644 --- a/ui-tui/src/hooks/useVirtualHistory.ts +++ b/ui-tui/src/hooks/useVirtualHistory.ts @@ -2,9 +2,9 @@ import type { ScrollBoxHandle } from '@hermes/ink' import { type RefObject, useCallback, + useDeferredValue, useEffect, useLayoutEffect, - useMemo, useRef, useState, useSyncExternalStore @@ -14,8 +14,43 @@ const ESTIMATE = 4 const OVERSCAN = 40 const MAX_MOUNTED = 260 const COLD_START = 40 +// Floor on unmeasured row height used when computing coverage — guarantees +// the mounted span physically reaches the viewport bottom regardless of how +// small items actually are (at the cost of over-mounting when items are +// larger; overscan absorbs that). +const PESSIMISTIC = 1 +// Tightest safe scrollTop bin for the useSyncExternalStore snapshot. Small +// wheel ticks that don't cross a bin short-circuit React's commit entirely; +// Ink keeps painting via ScrollBox.forceRender + direct scrollTop reads. +// Half of OVERSCAN keeps ≥20 rows of cushion before the mounted range +// would actually need to shift. const QUANTUM = OVERSCAN >> 1 +// Renders to keep the mount range frozen after width change (heights scaled +// but not yet re-measured). Render #1 skips measurement so pre-resize Yoga +// doesn't poison the scaled cache; render #2's useLayoutEffect captures +// post-resize heights; render #3 recomputes range with accurate data. const FREEZE_RENDERS = 2 +// Cap on NEW items mounted per commit when scrolling fast. Without this, +// a single PageUp into unmeasured territory mounts ~190 rows with +// PESSIMISTIC=1 coverage — each row running marked lexer + syntax +// highlighting for ~3ms = ~600ms sync block. Sliding toward the target +// over several commits keeps per-commit mount cost bounded. 
+const SLIDE_STEP = 25 + +const NOOP = () => {} + +const upperBound = (arr: ArrayLike, target: number) => { + let lo = 0 + let hi = arr.length + + while (lo < hi) { + const mid = (lo + hi) >> 1 + + arr[mid]! <= target ? (lo = mid + 1) : (hi = mid) + } + + return lo +} export const shouldSetVirtualClamp = ({ itemCount, @@ -29,19 +64,6 @@ export const shouldSetVirtualClamp = ({ viewportHeight: number }) => itemCount > 0 && viewportHeight > 0 && !sticky && !liveTailActive -const upperBound = (arr: number[], target: number) => { - let lo = 0 - let hi = arr.length - - while (lo < hi) { - const mid = (lo + hi) >> 1 - - arr[mid]! <= target ? (lo = mid + 1) : (hi = mid) - } - - return lo -} - export function useVirtualHistory( scrollRef: RefObject, items: readonly { key: string }[], @@ -57,15 +79,28 @@ export function useVirtualHistory( const nodes = useRef(new Map()) const heights = useRef(new Map()) const refs = useRef(new Map void>()) - const [ver, setVer] = useState(0) + // Bump whenever heightCache mutates so offsets rebuild on next read. + // Ref (not state) — checked during render phase, zero extra commits. + const offsetVersion = useRef(0) + // Cached offsets: reused Float64Array keyed on (itemCount, version) so we + // only rebuild when something actually changed. Previous approach allocated + // a fresh Array(n+1) every render — at n=10k that's ~80KB/render of GC + // pressure during streaming. + const offsetsCache = useRef<{ arr: Float64Array; n: number; version: number }>({ + arr: new Float64Array(0), + n: -1, + version: -1 + }) const [hasScrollRef, setHasScrollRef] = useState(false) const metrics = useRef({ sticky: true, top: 0, vp: 0 }) + const lastScrollTopRef = useRef(0) - // Width change: scale cached heights (not clear — clearing forces a - // pessimistic back-walk mounting ~190 rows at once, each a fresh - // marked.lexer + syntax highlight ≈ 3ms). 
Freeze mount range for 2 - // renders so warm memos survive; skip one measurement so useLayoutEffect - // doesn't poison the scaled cache with pre-resize Yoga heights. + // Width change: scale cached heights by oldCols/newCols instead of clearing + // (clearing forces a pessimistic back-walk mounting ~190 rows at once, each + // a fresh marked.lexer + syntax highlight ≈ 3ms). Freeze the mount range + // for 2 renders so warm memos survive; skip one measurement pass so + // useLayoutEffect doesn't poison the scaled cache with pre-resize Yoga + // heights. const prevColumns = useRef(columns) const skipMeasurement = useRef(false) const prevRange = useRef(null) @@ -80,6 +115,7 @@ export function useVirtualHistory( heights.current.set(k, Math.max(1, Math.round(h * ratio))) } + offsetVersion.current++ skipMeasurement.current = true freezeRenders.current = FREEZE_RENDERS } @@ -88,11 +124,18 @@ export function useVirtualHistory( setHasScrollRef(Boolean(scrollRef.current)) }, [scrollRef]) + // Quantized snapshot: same-bin scrolls (most wheel ticks) produce the same + // number → React.Object.is short-circuits the commit entirely. sticky state + // is folded in via the sign bit so sticky→broken transitions also trigger. + // Uses the TARGET (committed + pendingDelta), not committed scrollTop, so + // scrollBy notifications immediately remount for the destination before + // Ink's drain frames need the children. + const subscribe = useCallback( + (cb: () => void) => (hasScrollRef ? scrollRef.current?.subscribe(cb) : null) ?? NOOP, + [hasScrollRef, scrollRef] + ) useSyncExternalStore( - useCallback( - (cb: () => void) => (hasScrollRef ? scrollRef.current?.subscribe(cb) : null) ?? 
(() => () => {}), - [hasScrollRef, scrollRef] - ), + subscribe, () => { const s = scrollRef.current @@ -100,9 +143,10 @@ export function useVirtualHistory( return NaN } - const b = Math.floor((s.getScrollTop() + s.getPendingDelta()) / QUANTUM) + const target = s.getScrollTop() + s.getPendingDelta() + const bin = Math.floor(target / QUANTUM) - return s.isSticky() ? -b - 1 : b + return s.isSticky() ? ~bin : bin }, () => NaN ) @@ -121,26 +165,33 @@ export function useVirtualHistory( } if (dirty) { - setVer(v => v + 1) + offsetVersion.current++ } }, [items]) - const offsets = useMemo(() => { - void ver - const out = new Array(items.length + 1).fill(0) + // Offsets: Float64Array reused across renders, invalidated by offsetVersion + // bumps from heightCache writers (measureRef, resize-scale, GC). Binary + // search tolerates either monotone source, so no need to rebuild unless + // something changed. + const n = items.length - for (let i = 0; i < items.length; i++) { - out[i + 1] = out[i]! + Math.max(1, Math.floor(heights.current.get(items[i]!.key) ?? estimate)) + if (offsetsCache.current.version !== offsetVersion.current || offsetsCache.current.n !== n) { + const arr = offsetsCache.current.arr.length >= n + 1 ? offsetsCache.current.arr : new Float64Array(n + 1) + + arr[0] = 0 + + for (let i = 0; i < n; i++) { + arr[i + 1] = arr[i]! + Math.max(1, Math.floor(heights.current.get(items[i]!.key) ?? estimate)) } - return out - }, [estimate, items, ver]) + offsetsCache.current = { arr, n, version: offsetVersion.current } + } - const n = items.length + const offsets = offsetsCache.current.arr const total = offsets[n] ?? 0 const top = Math.max(0, scrollRef.current?.getScrollTop() ?? 0) - const pending = scrollRef.current?.getPendingDelta() ?? 0 - const target = Math.max(0, top + pending) + const pendingDelta = scrollRef.current?.getPendingDelta() ?? 0 + const target = Math.max(0, top + pendingDelta) const vp = Math.max(0, scrollRef.current?.getViewportHeight() ?? 
0) const sticky = scrollRef.current?.isSticky() ?? true const recentManual = Date.now() - (scrollRef.current?.getLastManualScrollAt() ?? 0) < 1200 @@ -168,9 +219,22 @@ export function useVirtualHistory( start-- } } else { - const lo = Math.max(0, Math.min(top, target) - overscan) - const hi = Math.max(top, target) + vp + overscan + // User scrolled up. Span [committed..target] so every drain frame is + // covered. Claude-code caps the span at 3×viewport so pendingDelta + // growing unbounded (MX Master free-spin) doesn't blow the mount + // budget; the clamp (setClampBounds) shows edge-of-mounted content + // during catch-up. + const MAX_SPAN = vp * 3 + const rawLo = Math.min(top, target) + const rawHi = Math.max(top, target) + const span = rawHi - rawLo + const clampedLo = span > MAX_SPAN ? (pendingDelta < 0 ? rawHi - MAX_SPAN : rawLo) : rawLo + const clampedHi = clampedLo + Math.min(span, MAX_SPAN) + const lo = Math.max(0, clampedLo - overscan) + const hi = clampedHi + vp + overscan + // Binary search — offsets is monotone. Linear walk was O(n) at n=10k+, + // ~2ms per render during scroll. start = Math.max(0, Math.min(n - 1, upperBound(offsets, lo) - 1)) end = Math.max(start + 1, Math.min(n, upperBound(offsets, hi))) } @@ -180,17 +244,144 @@ export function useVirtualHistory( sticky ? (start = Math.max(0, end - maxMounted)) : (end = Math.min(n, start + maxMounted)) } + // Coverage guarantee: ensure sum(real or pessimistic heights) ≥ + // viewportH + 2*overscan so the viewport is physically covered even when + // items are tiny. Pessimistic because uncached items use a floor of 1 — + // over-mounts when items are large, never leaves blank spacer showing. + if (n > 0 && vp > 0 && !frozenRange) { + const needed = vp + 2 * overscan + let coverage = 0 + + for (let i = start; i < end; i++) { + coverage += heights.current.get(items[i]!.key) ?? 
PESSIMISTIC + } + + if (sticky) { + const minStart = Math.max(0, end - maxMounted) + + while (start > minStart && coverage < needed) { + start-- + coverage += heights.current.get(items[start]!.key) ?? PESSIMISTIC + } + } else { + const maxEnd = Math.min(n, start + maxMounted) + + while (end < maxEnd && coverage < needed) { + coverage += heights.current.get(items[end]!.key) ?? PESSIMISTIC + end++ + } + } + } + + // Slide cap: limit how many NEW items mount this commit. Gates on scroll + // VELOCITY (|scrollTop delta since last commit| + |pendingDelta| > + // 2×viewport — key-repeat PageUp moves ~viewport/2 per press). Covers + // both scrollBy (pendingDelta) and scrollTo (direct write). Normal single + // PageUp skips this; the clamp holds the viewport at the mounted edge + // during catch-up so there's no blank screen. Only caps range GROWTH; + // shrinking is unbounded. + if (!frozenRange && prevRange.current && vp > 0) { + const velocity = Math.abs(top - lastScrollTopRef.current) + Math.abs(pendingDelta) + + if (velocity > vp * 2) { + const [pS, pE] = prevRange.current + + if (start < pS - SLIDE_STEP) { + start = pS - SLIDE_STEP + } + + if (end > pE + SLIDE_STEP) { + end = pE + SLIDE_STEP + } + + // A large jump past the capped end can invert (start > end); mount + // SLIDE_STEP items from the new start so the viewport isn't blank + // during catch-up. + if (start > end) { + end = Math.min(start + SLIDE_STEP, n) + } + } + } + + lastScrollTopRef.current = top + if (freezeRenders.current > 0) { freezeRenders.current-- } else { prevRange.current = [start, end] } + // Time-slice range growth via useDeferredValue. Urgent render keeps Ink + // painting with the OLD range (all memo hits, fast); deferred render + // transitions to the NEW range (fresh mounts: Md, syntax highlight) in a + // non-blocking background commit. The clamp (setClampBounds) pins the + // viewport to the mounted edge so there's no visual artifact from the + // deferred range lagging briefly. 
Only deferral range GROWTH — shrinking + // is cheap (unmount = remove fiber, no parse). + const dStart = useDeferredValue(start) + const dEnd = useDeferredValue(end) + let effStart = start < dStart ? dStart : start + let effEnd = end > dEnd ? dEnd : end + + // Inverted range (large jump with deferred value lagging) or sticky snap + // (scrollToBottom needs the tail mounted NOW so maxScroll lands on content, + // not bottomSpacer) — skip deferral. + if (effStart > effEnd || sticky) { + effStart = start + effEnd = end + } + + // Scrolling DOWN — bypass effEnd deferral so the tail mounts immediately. + // Without this, the clamp holds scrollTop short of the real bottom and + // the user feels "stuck before bottom". effStart stays deferred so scroll- + // UP keeps time-slicing (older messages parse on mount). + if (pendingDelta > 0) { + effEnd = end + } + + // Final O(viewport) enforcement. Deferred+bypass combinations above can + // leak: during sustained PageUp, concurrent mode interleaves dStart updates + // with effEnd=end bypasses across commits and the effective window drifts + // wider than either bound alone. Trim the far edge by viewport position + // (not pendingDelta direction — that flips mid-settle under concurrent + // scheduling and yanks scrollTop). + if (effEnd - effStart > maxMounted && vp > 0) { + const mid = (offsets[effStart]! + offsets[effEnd]!) / 2 + + if (top < mid) { + effEnd = effStart + maxMounted + } else { + effStart = effEnd - maxMounted + } + } + const measureRef = useCallback((key: string) => { let fn = refs.current.get(key) if (!fn) { - fn = (el: unknown) => (el ? nodes.current.set(key, el) : nodes.current.delete(key)) + fn = (el: unknown) => { + if (el) { + nodes.current.set(key, el) + + return + } + + // Measure-at-unmount: the yogaNode is still valid here (reconciler + // calls ref(null) before removeChild → freeRecursive), so we grab + // the final height before WASM release. 
Without this, items + // scrolled out during fast pan keep a stale estimate in heightCache + // and offset math drifts until the next mount/remount cycle. + const existing = nodes.current.get(key) as MeasuredNode | undefined + const h = Math.ceil(existing?.yogaNode?.getComputedHeight?.() ?? 0) + + if (h > 0 && heights.current.get(key) !== h) { + heights.current.set(key, h) + offsetVersion.current++ + } + + nodes.current.delete(key) + } + refs.current.set(key, fn) } @@ -202,25 +393,33 @@ export function useVirtualHistory( let dirty = false // Give the renderer the mounted-row coverage for passive scroll clamping. - // Without this, burst wheel/page scroll can race past the React commit that - // updates the virtual range and paint spacer-only frames. + // Clamp MUST use the EFFECTIVE (deferred) range, not the immediate one. + // During fast scroll, immediate [start,end] may already cover the new + // scrollTop position, but children still render at the deferred range. + // If clamp used immediate bounds, render-node-to-output's drain-gate + // would drain past the deferred children's span → viewport lands in + // spacer → white flash. if (s && shouldSetVirtualClamp({ itemCount: n, liveTailActive, sticky, viewportHeight: vp })) { - const min = offsets[start] ?? 0 - const max = Math.max(min, (offsets[end] ?? total) - vp) - s.setClampBounds(min, max) + const effTopSpacer = offsets[effStart] ?? 0 + const effBottom = offsets[effEnd] ?? total + // At effEnd=n there's no bottomSpacer — use Infinity so render-node- + // to-output's own Math.min(cur, maxScroll) governs. Using offsets[n] + // here would bake in heightCache (one render behind Yoga), and during + // streaming the tail item's cached height lags its real height — + // sticky-break would then clamp below the real max and push + // streaming text off-viewport. + const clampMin = effStart === 0 ? 0 : effTopSpacer + const clampMax = effEnd === n ? 
Infinity : Math.max(effTopSpacer, effBottom - vp) + + s.setClampBounds(clampMin, clampMax) } else { - // Sticky bottom often has live, non-virtualized tail content after the - // virtual transcript (streaming answer / thinking / tools). A clamp based - // only on virtual history would cap rendering before that tail and make - // live thinking appear to vanish. No burst-scroll clamp is needed while - // sticky anyway. s?.setClampBounds(undefined, undefined) } if (skipMeasurement.current) { skipMeasurement.current = false } else { - for (let i = start; i < end; i++) { + for (let i = effStart; i < effEnd; i++) { const k = items[i]?.key if (!k) { @@ -254,17 +453,17 @@ export function useVirtualHistory( } if (dirty) { - setVer(v => v + 1) + offsetVersion.current++ } - }, [end, hasScrollRef, items, liveTailActive, n, offsets, recentManual, scrollRef, start, sticky, total, vp]) + }) return { - bottomSpacer: Math.max(0, total - (offsets[end] ?? total)), - end, + bottomSpacer: Math.max(0, total - (offsets[effEnd] ?? total)), + end: effEnd, measureRef, offsets, - start, - topSpacer: offsets[start] ?? 0 + start: effStart, + topSpacer: offsets[effStart] ?? 
0 } } diff --git a/ui-tui/src/lib/liveProgress.ts b/ui-tui/src/lib/liveProgress.ts index 2177d213..1407682f 100644 --- a/ui-tui/src/lib/liveProgress.ts +++ b/ui-tui/src/lib/liveProgress.ts @@ -1,5 +1,8 @@ import type { Msg, TodoItem } from '../types.js' +export const countPendingTodos = (todos: readonly TodoItem[]) => + todos.filter(todo => todo.status === 'in_progress' || todo.status === 'pending').length + export const isTodoDone = (todos: readonly TodoItem[]) => todos.length > 0 && todos.every(todo => todo.status === 'completed' || todo.status === 'cancelled') diff --git a/ui-tui/src/types.ts b/ui-tui/src/types.ts index ac61868b..62c4fd3e 100644 --- a/ui-tui/src/types.ts +++ b/ui-tui/src/types.ts @@ -117,6 +117,7 @@ export interface Msg { toolTokens?: number tools?: string[] todos?: TodoItem[] + todoIncomplete?: boolean } export type Role = 'assistant' | 'system' | 'tool' | 'user' From b36007b24679214746787f450b192f47f13c0c16 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:15:59 -0500 Subject: [PATCH 51/87] feat(tui): allow collapsing archived todo panels --- ui-tui/babel.compiler.config.cjs | 6 +++--- ui-tui/eslint.config.mjs | 11 +++++++++++ ui-tui/package.json | 3 ++- ui-tui/src/components/todoPanel.tsx | 29 ++++++++++++++++++++++++----- 4 files changed, 40 insertions(+), 9 deletions(-) diff --git a/ui-tui/babel.compiler.config.cjs b/ui-tui/babel.compiler.config.cjs index b81ff954..ab41a82e 100644 --- a/ui-tui/babel.compiler.config.cjs +++ b/ui-tui/babel.compiler.config.cjs @@ -26,7 +26,7 @@ module.exports = { ], // We feed already-compiled JS into babel; don't re-parse as TS/JSX. // @babel/preset-env etc. would over-transform — the compiler is our only - // transform here. - babelrc: false, - configFile: false + // transform here. babelrc:false stops @babel/cli from walking up the + // filesystem looking for other configs (the parent repo might add one). 
+ babelrc: false } diff --git a/ui-tui/eslint.config.mjs b/ui-tui/eslint.config.mjs index 1b20c324..4452f49f 100644 --- a/ui-tui/eslint.config.mjs +++ b/ui-tui/eslint.config.mjs @@ -3,6 +3,7 @@ import typescriptEslint from '@typescript-eslint/eslint-plugin' import typescriptParser from '@typescript-eslint/parser' import perfectionist from 'eslint-plugin-perfectionist' import reactPlugin from 'eslint-plugin-react' +import reactCompiler from 'eslint-plugin-react-compiler' import hooksPlugin from 'eslint-plugin-react-hooks' import unusedImports from 'eslint-plugin-unused-imports' import globals from 'globals' @@ -43,6 +44,7 @@ export default [ 'custom-rules': customRules, perfectionist, react: reactPlugin, + 'react-compiler': reactCompiler, 'react-hooks': hooksPlugin, 'unused-imports': unusedImports }, @@ -53,6 +55,12 @@ export default [ '@typescript-eslint/no-unused-vars': 'off', 'no-undef': 'off', 'no-unused-vars': 'off', + // React Compiler: warn (not error) so the gate doesn't block merges + // while we migrate. Flags patterns that would break the compiler at + // runtime (mutating refs during render, non-PascalCase components, + // etc.). See audit §5 — we run the compiler in `npm run build` as a + // post-pass over tsc's `dist/` output. + 'react-compiler/react-compiler': 'warn', 'padding-line-between-statements': [ 1, { blankLine: 'always', next: ['block-like', 'block', 'return', 'if', 'class', 'continue', 'debugger', 'break', 'multiline-const', 'multiline-let'], prev: '*' }, @@ -89,6 +97,9 @@ export default [ 'no-constant-condition': 'off', 'no-empty': 'off', 'no-redeclare': 'off', + // Ink internals: reconciler, style pool, DOM node impl — full of + // intentional side effects the compiler rules reject. 
+ 'react-compiler/react-compiler': 'off', 'react-hooks/exhaustive-deps': 'off' } }, diff --git a/ui-tui/package.json b/ui-tui/package.json index 4a16c9c3..061e3bc4 100644 --- a/ui-tui/package.json +++ b/ui-tui/package.json @@ -6,7 +6,8 @@ "scripts": { "dev": "npm run build --prefix packages/hermes-ink && tsx --watch src/entry.tsx", "start": "tsx src/entry.tsx", - "build": "npm run build --prefix packages/hermes-ink && tsc -p tsconfig.build.json && chmod +x dist/entry.js", + "build": "npm run build --prefix packages/hermes-ink && tsc -p tsconfig.build.json && npm run build:compile && chmod +x dist/entry.js", + "build:compile": "babel dist --out-dir dist --config-file ./babel.compiler.config.cjs --extensions .js --keep-file-extension", "type-check": "tsc --noEmit -p tsconfig.json", "lint": "eslint src/ packages/", "lint:fix": "eslint src/ packages/ --fix", diff --git a/ui-tui/src/components/todoPanel.tsx b/ui-tui/src/components/todoPanel.tsx index 567050a3..8b5b59b6 100644 --- a/ui-tui/src/components/todoPanel.tsx +++ b/ui-tui/src/components/todoPanel.tsx @@ -1,5 +1,5 @@ import { Box, Text } from '@hermes/ink' -import { memo } from 'react' +import { memo, useState } from 'react' import { countPendingTodos } from '../lib/liveProgress.js' import { todoGlyph, todoTone } from '../lib/todo.js' @@ -13,7 +13,7 @@ const rowColor = (t: Theme, status: TodoItem['status']) => { } export const TodoPanel = memo(function TodoPanel({ - collapsed = false, + collapsed, incomplete = false, onToggle, t, @@ -25,6 +25,25 @@ export const TodoPanel = memo(function TodoPanel({ t: Theme todos: TodoItem[] }) { + // Fallback local state for archived todos in transcript where there's no + // external controller. Live TodoPanel passes collapsed+onToggle from the + // turn store so clicks still work there. + const [localCollapsed, setLocalCollapsed] = useState(false) + const isControlled = typeof collapsed === 'boolean' + const effectiveCollapsed = isControlled ? 
collapsed : localCollapsed + + const handleToggle = () => { + if (onToggle) { + onToggle() + + return + } + + if (!isControlled) { + setLocalCollapsed(v => !v) + } + } + if (!todos.length) { return null } @@ -34,9 +53,9 @@ export const TodoPanel = memo(function TodoPanel({ return ( - + - {collapsed ? '▸ ' : '▾ '} + {effectiveCollapsed ? '▸ ' : '▾ '} Todo {' '} @@ -52,7 +71,7 @@ export const TodoPanel = memo(function TodoPanel({ - {!collapsed && ( + {!effectiveCollapsed && ( {todos.map(todo => { const tone = todoTone(todo.status) From bde89c169bdcc7d34839a8706b142929782c615f Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:17:39 -0500 Subject: [PATCH 52/87] fix(cli): -c picks the most recently used session --- hermes_cli/main.py | 25 +++++- tests/hermes_cli/test_resolve_last_session.py | 61 ++++++++++++++ ui-tui/src/components/appLayout.tsx | 29 ++++--- ui-tui/src/lib/perfPane.tsx | 82 +++++++++++++++++++ 4 files changed, 183 insertions(+), 14 deletions(-) create mode 100644 tests/hermes_cli/test_resolve_last_session.py create mode 100644 ui-tui/src/lib/perfPane.tsx diff --git a/hermes_cli/main.py b/hermes_cli/main.py index 96874570..40de1f12 100644 --- a/hermes_cli/main.py +++ b/hermes_cli/main.py @@ -596,15 +596,32 @@ def _session_browse_picker(sessions: list) -> Optional[str]: def _resolve_last_session(source: str = "cli") -> Optional[str]: - """Look up the most recent session ID for a source.""" + """Look up the most recently *used* session ID for a source. + + Previously this returned the most recently *started* session, which meant + `hermes -c` could skip the session you just closed if a newer one had been + opened earlier in a different window. We now order by last_active + (max message timestamp, falling back to started_at) so -c always resumes + the most recent conversation you actually touched. 
+ """ try: from hermes_state import SessionDB db = SessionDB() - sessions = db.search_sessions(source=source, limit=1) + sessions = db.search_sessions(source=source, limit=20) db.close() - if sessions: - return sessions[0]["id"] + if not sessions: + return None + + def _last_active(s: dict) -> float: + v = s.get("last_active") or s.get("started_at") or 0 + try: + return float(v) + except (TypeError, ValueError): + return 0.0 + + sessions.sort(key=_last_active, reverse=True) + return sessions[0]["id"] except Exception: pass return None diff --git a/tests/hermes_cli/test_resolve_last_session.py b/tests/hermes_cli/test_resolve_last_session.py new file mode 100644 index 00000000..68abc3df --- /dev/null +++ b/tests/hermes_cli/test_resolve_last_session.py @@ -0,0 +1,61 @@ +"""Verify `hermes -c` picks the session the user most recently used.""" + +from __future__ import annotations + +from hermes_cli.main import _resolve_last_session + + +class _FakeDB: + def __init__(self, rows): + self._rows = rows + self.closed = False + + def search_sessions(self, source=None, limit=20, **_kw): + rows = [r for r in self._rows if r.get("source") == source] if source else list(self._rows) + return rows[:limit] + + def close(self): + self.closed = True + + +def test_resolve_last_session_prefers_last_active_over_started_at(monkeypatch): + # `search_sessions` returns in started_at DESC order, but the most recently + # *touched* session may have been started earlier. -c should pick by + # last_active so closing the active session and typing `hermes -c` resumes + # that one, not an older-but-newer-started session from another window. 
+ rows = [ + { + "id": "new_started_old_active", + "source": "cli", + "started_at": 1000.0, + "last_active": 100.0, + }, + { + "id": "old_started_recently_active", + "source": "cli", + "started_at": 500.0, + "last_active": 999.0, + }, + ] + + fake_db = _FakeDB(rows) + monkeypatch.setattr("hermes_state.SessionDB", lambda: fake_db) + + assert _resolve_last_session("cli") == "old_started_recently_active" + assert fake_db.closed + + +def test_resolve_last_session_returns_none_when_empty(monkeypatch): + monkeypatch.setattr("hermes_state.SessionDB", lambda: _FakeDB([])) + assert _resolve_last_session("cli") is None + + +def test_resolve_last_session_falls_back_to_started_at(monkeypatch): + # When last_active is missing entirely (legacy row), fall back to + # started_at so the helper still picks the newest session. + rows = [ + {"id": "older", "source": "cli", "started_at": 10.0}, + {"id": "newer", "source": "cli", "started_at": 20.0}, + ] + monkeypatch.setattr("hermes_state.SessionDB", lambda: _FakeDB(rows)) + assert _resolve_last_session("cli") == "newer" diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 9e716583..0c136407 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -8,6 +8,7 @@ import { $isBlocked, $overlayState, patchOverlayState } from '../app/overlayStor import { $uiState } from '../app/uiStore.js' import { PLACEHOLDER } from '../content/placeholders.js' import { inputVisualHeight, stableComposerColumns } from '../lib/inputMetrics.js' +import { PerfPane } from '../lib/perfPane.js' import { AgentsOverlay } from './agentsOverlay.js' import { GoodVibesHeart, StatusRule, StickyPromptTracker, TranscriptScrollbar } from './appChrome.js' @@ -248,23 +249,31 @@ export const AppLayout = memo(function AppLayout({ {overlay.agents ? 
( - + + + ) : ( - + + + )} {!overlay.agents && ( <> - + + + - + + + )} diff --git a/ui-tui/src/lib/perfPane.tsx b/ui-tui/src/lib/perfPane.tsx new file mode 100644 index 00000000..32b260b7 --- /dev/null +++ b/ui-tui/src/lib/perfPane.tsx @@ -0,0 +1,82 @@ +// Perf instrumentation: wraps React.Profiler around named panes and writes +// commit timings to a log file when HERMES_DEV_PERF is set. Enabled per-run +// via the env var; zero-cost (Profiler is replaced by a Fragment) when off. +// +// Log format: one JSON object per line, for easy `jq` filtering. We only +// log commits that exceed a threshold (default 2ms) so the file doesn't +// fill up with sub-millisecond idle renders. Tune via HERMES_DEV_PERF_MS. +// +// Usage in consumers: +// import { PerfPane } from './perfPane.js' +// ... +// +// Inspect with: +// tail -f ~/.hermes/perf.log | jq -c 'select(.actualMs > 8)' +// jq -s 'group_by(.id) | map({id: .[0].id, count: length, p50: (sort_by(.actualMs) | .[length/2|floor].actualMs), p99: (sort_by(.actualMs) | .[length*0.99|floor].actualMs)})' ~/.hermes/perf.log + +import { appendFileSync, mkdirSync } from 'node:fs' +import { homedir } from 'node:os' +import { dirname, join } from 'node:path' + +import { Profiler, type ProfilerOnRenderCallback, type ReactNode } from 'react' + +const ENABLED = /^(?:1|true|yes|on)$/i.test((process.env.HERMES_DEV_PERF ?? '').trim()) +const THRESHOLD_MS = Number(process.env.HERMES_DEV_PERF_MS ?? '2') || 2 +const LOG_PATH = process.env.HERMES_DEV_PERF_LOG?.trim() || join(homedir(), '.hermes', 'perf.log') + +let initialized = false + +const ensureLogDir = () => { + if (initialized) { + return + } + + initialized = true + + try { + mkdirSync(dirname(LOG_PATH), { recursive: true }) + } catch { + // Best-effort — if we can't create the dir (readonly fs, /tmp, etc.) + // the appendFileSync calls below will throw silently and we drop the + // sample. Perf logging should never crash the TUI. 
+ } +} + +const onRender: ProfilerOnRenderCallback = (id, phase, actualMs, baseMs, startTime, commitTime) => { + if (actualMs < THRESHOLD_MS) { + return + } + + ensureLogDir() + + const row = { + actualMs: Math.round(actualMs * 100) / 100, + baseMs: Math.round(baseMs * 100) / 100, + commitMs: Math.round(commitTime * 100) / 100, + id, + phase, + startMs: Math.round(startTime * 100) / 100, + ts: Date.now() + } + + try { + appendFileSync(LOG_PATH, `${JSON.stringify(row)}\n`) + } catch { + // Same rationale as ensureLogDir — never crash the UI to log a sample. + } +} + +export function PerfPane({ children, id }: { children: ReactNode; id: string }) { + if (!ENABLED) { + return children + } + + return ( + + {children} + + ) +} + +export const PERF_ENABLED = ENABLED +export const PERF_LOG_PATH = LOG_PATH From debae25f1c4b7e47600a249c662fb880b26522 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:21:34 -0500 Subject: [PATCH 53/87] perf(tui): incremental markdown during streaming MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Split in-flight assistant text at the last stable block boundary so only the unclosed tail re-tokenizes per stream delta. Previously the full text was rendered as plain during streaming and only flipped to <Md> at message.complete — cheap per delta but loses live markdown formatting. New StreamingMd component holds a monotonically-growing stablePrefix in a ref (idempotent under StrictMode double-render), renders it as one <Md> that memoizes across deltas, and renders the unstable suffix as a second <Md> that re-parses on each delta. Cost per delta drops from O(total length) to O(unstable length). findStableBoundary walks back to the last "\n\n" outside an open fenced code block — splitting inside an open fence would orphan the opener and break highlighting in the prefix. Adapted from claude-code's src/components/Markdown.tsx:186 but built on our line-based tokenizer instead of marked.lexer.
9 new tests cover fence balance, boundary walk, and empty input. Part of the --tui perf audit (see audit #7). --- .../src/__tests__/streamingMarkdown.test.ts | 79 +++++++++++ ui-tui/src/components/messageLine.tsx | 6 +- ui-tui/src/components/streamingMarkdown.tsx | 127 ++++++++++++++++++ 3 files changed, 211 insertions(+), 1 deletion(-) create mode 100644 ui-tui/src/__tests__/streamingMarkdown.test.ts create mode 100644 ui-tui/src/components/streamingMarkdown.tsx diff --git a/ui-tui/src/__tests__/streamingMarkdown.test.ts b/ui-tui/src/__tests__/streamingMarkdown.test.ts new file mode 100644 index 00000000..cd283d8a --- /dev/null +++ b/ui-tui/src/__tests__/streamingMarkdown.test.ts @@ -0,0 +1,79 @@ +import { describe, expect, it } from 'vitest' + +import { findStableBoundary } from '../components/streamingMarkdown.js' +// We test the pure boundary logic by rendering the component's ref +// behaviour through repeated calls. Since React isn't being rendered here, +// we reach into the module to test findStableBoundary via its exported +// behaviour — but the pure helper isn't exported. So test the component's +// observable output: pass sequential text values and verify the stable +// prefix never retreats. +// +// Strategy: mount StreamingMd in isolation and observe which +// instances it renders (by text prop). Without a DOM renderer that's +// heavy, so we validate the helper behaviour by directly invoking the +// fence/boundary logic via a re-exported surface. 
+import { DEFAULT_THEME } from '../theme.js' + +describe('findStableBoundary', () => { + it('returns -1 when no blank line exists yet', () => { + expect(findStableBoundary('partial line with no newline yet')).toBe(-1) + }) + + it('returns -1 when only single newlines exist', () => { + expect(findStableBoundary('line one\nline two\nline three')).toBe(-1) + }) + + it('splits after the last blank line separator', () => { + // 'first\n\nsecond\n\nthird' → last blank = before 'third' + const text = 'first paragraph\n\nsecond paragraph\n\nthird' + const idx = findStableBoundary(text) + + expect(text.slice(0, idx)).toBe('first paragraph\n\nsecond paragraph\n\n') + expect(text.slice(idx)).toBe('third') + }) + + it('refuses to split inside an open fenced block', () => { + // Fence opens, contains a blank line inside the code, no close yet. + const text = '```ts\nfn();\n\nmore code here' + + expect(findStableBoundary(text)).toBe(-1) + }) + + it('splits before an open fenced block but not inside', () => { + const text = 'intro paragraph\n\n```ts\nfn();\n\nmore code' + const idx = findStableBoundary(text) + + expect(text.slice(0, idx)).toBe('intro paragraph\n\n') + expect(text.slice(idx).startsWith('```ts')).toBe(true) + }) + + it('allows splitting after a fenced block closes', () => { + const text = '```ts\nfn();\n```\n\nnarration continues' + const idx = findStableBoundary(text) + + expect(text.slice(0, idx)).toBe('```ts\nfn();\n```\n\n') + expect(text.slice(idx)).toBe('narration continues') + }) + + it('walks backwards through nested fence boundaries safely', () => { + // Two closed fences + narration + one new open fence. The only legal + // split is before the open fence, not between the closed ones. 
+ const text = '```js\na\n```\n\nmid text\n\n```python\nstill open' + const idx = findStableBoundary(text) + + expect(text.slice(0, idx)).toBe('```js\na\n```\n\nmid text\n\n') + }) + + it('handles empty input', () => { + expect(findStableBoundary('')).toBe(-1) + }) +}) + +describe('streaming theme assumption', () => { + it('theme is exportable (component import sanity check)', () => { + // Sanity that the theme we pass doesn't change shape. Component import + // already happens above — this is a smoke test that the module graph + // for streamingMarkdown wires up without cycles. + expect(DEFAULT_THEME.color.amber).toBeTruthy() + }) +}) diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index dddf0a59..0be28410 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -10,6 +10,7 @@ import type { Theme } from '../theme.js' import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' import { Md } from './markdown.js' +import { StreamingMd } from './streamingMarkdown.js' import { ToolTrail } from './thinking.js' import { TodoPanel } from './todoPanel.js' @@ -94,7 +95,10 @@ export const MessageLine = memo(function MessageLine({ if (msg.role === 'assistant') { return isStreaming ? ( - {boundedLiveRenderText(msg.text)} + // Incremental markdown: split at the last stable block boundary so + // only the in-flight tail re-tokenizes per delta. See + // streamingMarkdown.tsx for the cost model. + ) : ( ) diff --git a/ui-tui/src/components/streamingMarkdown.tsx b/ui-tui/src/components/streamingMarkdown.tsx new file mode 100644 index 00000000..e6dcbbfb --- /dev/null +++ b/ui-tui/src/components/streamingMarkdown.tsx @@ -0,0 +1,127 @@ +// StreamingMd — incremental markdown renderer for in-flight assistant text. +// +// Naive approach (render ) re-tokenizes the entire message +// on every stream delta. At 20-char batches over a 3 KB response that's 150 +// full re-parses. 
+// +// This splits `text` at the last stable top-level block boundary (blank +// line outside a fenced code span) into: +// stablePrefix — passed to an inner , memoized on its exact text +// value. During the turn, the prefix only grows monotonically, +// so its memo key matches the previous render and React +// reuses the cached subtree — zero re-tokenization. +// unstableSuffix — the in-flight block(s). A separate re-parses just +// this tail on every delta (O(unstable length) vs. +// O(total length)). +// +// The boundary is stored in a ref so it only advances — idempotent under +// StrictMode double-render. Component unmounts between turns (isStreaming +// flips off → message moves to history and renders via directly), so +// the ref resets naturally. +// +// See src/app/useMainApp.ts for the reasoning on why we don't memoize the +// whole Md text during streaming: that cache never hits because `text` is +// growing. Mirror claude-code's `StreamingMarkdown` approach adapted to +// our line-based tokenizer. + +import { memo, useRef } from 'react' + +import type { Theme } from '../theme.js' + +import { Md } from './markdown.js' + +// Count ``` or ~~~ fence toggles in `s` up to `end`. Odd = currently inside +// a fenced block; we can't split the prefix there or we'd orphan the fence. +const fenceOpenAt = (s: string, end: number) => { + let open = false + let i = 0 + + while (i < end) { + const nl = s.indexOf('\n', i) + const lineEnd = nl < 0 || nl > end ? end : nl + const line = s.slice(i, lineEnd) + + if (/^\s*(?:`{3,}|~{3,})/.test(line)) { + open = !open + } + + if (nl < 0 || nl >= end) { + break + } + + i = nl + 1 + } + + return open +} + +// Find the last "\n\n" boundary before `end` that is OUTSIDE a fenced code +// block. Returns the index AFTER the second newline (start of the next +// block), or -1 if no safe boundary exists yet. 
+export const findStableBoundary = (text: string) => { + let idx = text.length + + while (idx > 0) { + const boundary = text.lastIndexOf('\n\n', idx - 1) + + if (boundary < 0) { + return -1 + } + + // Boundary candidate: end of stable prefix is boundary + 2 (start of + // next block). Check fence balance up to that point. + const splitAt = boundary + 2 + + if (!fenceOpenAt(text, splitAt)) { + return splitAt + } + + idx = boundary + } + + return -1 +} + +export const StreamingMd = memo(function StreamingMd({ compact, t, text }: StreamingMdProps) { + const stablePrefixRef = useRef('') + + // Reset if the text no longer starts with our recorded prefix (defensive; + // normally the component unmounts between turns so this shouldn't trigger). + if (!text.startsWith(stablePrefixRef.current)) { + stablePrefixRef.current = '' + } + + const boundary = findStableBoundary(text) + + // Only advance the prefix — never retreat. The boundary math looks at the + // FULL text each call; if it returns a larger index than before, we grow + // the cached prefix. Monotonic growth makes the memo key stable across + // deltas (identical string → same subtree → no re-render). 
+ if (boundary > stablePrefixRef.current.length) { + stablePrefixRef.current = text.slice(0, boundary) + } + + const stablePrefix = stablePrefixRef.current + const unstableSuffix = text.slice(stablePrefix.length) + + if (!stablePrefix) { + return + } + + if (!unstableSuffix) { + return + } + + return ( + <> + + + + ) +}) + +interface StreamingMdProps { + compact?: boolean + t: Theme + text: string +} From cb7cfba6ded3b071be74d3218d96741f12c7e56b Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:21:57 -0500 Subject: [PATCH 54/87] fix(cli): surface last_active in search_sessions so -c works --- hermes_state.py | 20 ++++++++-- tests/hermes_cli/test_resolve_last_session.py | 40 +++++++++++++++++++ 2 files changed, 57 insertions(+), 3 deletions(-) diff --git a/hermes_state.py b/hermes_state.py index 3e5914c5..e92d5a30 100644 --- a/hermes_state.py +++ b/hermes_state.py @@ -1483,16 +1483,30 @@ class SessionDB: limit: int = 20, offset: int = 0, ) -> List[Dict[str, Any]]: - """List sessions, optionally filtered by source.""" + """List sessions, optionally filtered by source. + + Returns rows enriched with a computed ``last_active`` column (the + latest message timestamp for the session, falling back to + ``started_at``) so callers can sort by "most recently used" instead + of "most recently started". + """ + select_last_active = ( + "COALESCE(" + "(SELECT MAX(m.timestamp) FROM messages m WHERE m.session_id = s.id)," + " s.started_at" + ") AS last_active" + ) with self._lock: if source: cursor = self._conn.execute( - "SELECT * FROM sessions WHERE source = ? ORDER BY started_at DESC LIMIT ? OFFSET ?", + f"SELECT s.*, {select_last_active} FROM sessions s " + "WHERE s.source = ? ORDER BY s.started_at DESC LIMIT ? OFFSET ?", (source, limit, offset), ) else: cursor = self._conn.execute( - "SELECT * FROM sessions ORDER BY started_at DESC LIMIT ? OFFSET ?", + f"SELECT s.*, {select_last_active} FROM sessions s " + "ORDER BY s.started_at DESC LIMIT ? 
OFFSET ?", (limit, offset), ) return [dict(row) for row in cursor.fetchall()] diff --git a/tests/hermes_cli/test_resolve_last_session.py b/tests/hermes_cli/test_resolve_last_session.py index 68abc3df..db4d321c 100644 --- a/tests/hermes_cli/test_resolve_last_session.py +++ b/tests/hermes_cli/test_resolve_last_session.py @@ -45,6 +45,46 @@ def test_resolve_last_session_prefers_last_active_over_started_at(monkeypatch): assert fake_db.closed +def test_search_sessions_exposes_last_active_column(tmp_path, monkeypatch): + # End-to-end: the actual SessionDB must surface a last_active column so + # _resolve_last_session's sort works. A previous bug had last_active=None + # on every row because search_sessions used `SELECT *` with no computed + # column, silently breaking the -c resume behavior. + monkeypatch.setenv("HERMES_HOME", str(tmp_path)) + monkeypatch.setattr("pathlib.Path.home", lambda: tmp_path) + + import hermes_state + + from pathlib import Path + + db = hermes_state.SessionDB(db_path=Path(tmp_path / "state.db")) + try: + db.create_session("s_started_later", source="cli") + db.create_session("s_active_later", source="cli") + # Force started_at ordering so the test is deterministic regardless + # of how quickly the two inserts land. + with db._lock: + db._conn.execute("UPDATE sessions SET started_at=? WHERE id=?", (2000.0, "s_started_later")) + db._conn.execute("UPDATE sessions SET started_at=? WHERE id=?", (1000.0, "s_active_later")) + db._conn.commit() + + db.append_message("s_active_later", role="user", content="hi") + with db._lock: + db._conn.execute( + "UPDATE messages SET timestamp=? 
WHERE session_id=?", + (3000.0, "s_active_later"), + ) + db._conn.commit() + + rows = db.search_sessions(source="cli", limit=5) + ids = {r["id"]: r.get("last_active") for r in rows} + + assert ids["s_started_later"] == 2000.0 + assert ids["s_active_later"] == 3000.0 + finally: + db.close() + + def test_resolve_last_session_returns_none_when_empty(monkeypatch): monkeypatch.setattr("hermes_state.SessionDB", lambda: _FakeDB([])) assert _resolve_last_session("cli") is None From 2259eac49e5ee78b4549fd225317a18958f35891 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:24:15 -0500 Subject: [PATCH 55/87] feat(tui): collapse completed todo panel on turn end --- ui-tui/src/__tests__/createGatewayEventHandler.test.ts | 8 +++++++- ui-tui/src/__tests__/turnStore.test.ts | 1 + ui-tui/src/app/turnStore.ts | 3 ++- ui-tui/src/components/messageLine.tsx | 9 ++++++++- ui-tui/src/components/todoPanel.tsx | 4 +++- ui-tui/src/types.ts | 1 + 6 files changed, 22 insertions(+), 4 deletions(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index 0c0537a8..c17aa565 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -93,7 +93,13 @@ describe('createGatewayEventHandler', () => { onEvent({ payload: { text: 'done' }, type: 'message.complete' } as any) expect(getTurnState().todos).toEqual([]) - expect(appended).toContainEqual({ kind: 'trail', role: 'system', text: '', todos }) + expect(appended).toContainEqual({ + kind: 'trail', + role: 'system', + text: '', + todoCollapsedByDefault: true, + todos + }) }) it('keeps the current todo list visible when the next message starts', () => { diff --git a/ui-tui/src/__tests__/turnStore.test.ts b/ui-tui/src/__tests__/turnStore.test.ts index b1b48565..04797fd1 100644 --- a/ui-tui/src/__tests__/turnStore.test.ts +++ b/ui-tui/src/__tests__/turnStore.test.ts @@ -26,6 +26,7 @@ 
describe('turnStore live progress helpers', () => { kind: 'trail', role: 'system', text: '', + todoCollapsedByDefault: true, todos: [ { content: 'prep', id: 'prep', status: 'completed' }, { content: 'serve', id: 'serve', status: 'completed' } diff --git a/ui-tui/src/app/turnStore.ts b/ui-tui/src/app/turnStore.ts index da4484ab..e7f3366a 100644 --- a/ui-tui/src/app/turnStore.ts +++ b/ui-tui/src/app/turnStore.ts @@ -49,12 +49,13 @@ export const archiveTodosAtTurnEnd = () => { return [] } + const done = isTodoDone(state.todos) const msg: Msg = { kind: 'trail', role: 'system', text: '', todos: state.todos, - ...(isTodoDone(state.todos) ? {} : { todoIncomplete: true }) + ...(done ? { todoCollapsedByDefault: true } : { todoIncomplete: true }) } patchTurnState({ todoCollapsed: false, todos: [] }) diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index 0be28410..a3d3f584 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -38,7 +38,14 @@ export const MessageLine = memo(function MessageLine({ const thinking = msg.thinking?.trim() ?? '' if (msg.kind === 'trail' && msg.todos?.length) { - return + return ( + + ) } if (msg.kind === 'trail' && (msg.tools?.length || tools.length || thinking)) { diff --git a/ui-tui/src/components/todoPanel.tsx b/ui-tui/src/components/todoPanel.tsx index 8b5b59b6..9480ee0a 100644 --- a/ui-tui/src/components/todoPanel.tsx +++ b/ui-tui/src/components/todoPanel.tsx @@ -14,12 +14,14 @@ const rowColor = (t: Theme, status: TodoItem['status']) => { export const TodoPanel = memo(function TodoPanel({ collapsed, + defaultCollapsed = false, incomplete = false, onToggle, t, todos }: { collapsed?: boolean + defaultCollapsed?: boolean incomplete?: boolean onToggle?: () => void t: Theme @@ -28,7 +30,7 @@ export const TodoPanel = memo(function TodoPanel({ // Fallback local state for archived todos in transcript where there's no // external controller. 
Live TodoPanel passes collapsed+onToggle from the // turn store so clicks still work there. - const [localCollapsed, setLocalCollapsed] = useState(false) + const [localCollapsed, setLocalCollapsed] = useState(defaultCollapsed) const isControlled = typeof collapsed === 'boolean' const effectiveCollapsed = isControlled ? collapsed : localCollapsed diff --git a/ui-tui/src/types.ts b/ui-tui/src/types.ts index 62c4fd3e..6aea78e3 100644 --- a/ui-tui/src/types.ts +++ b/ui-tui/src/types.ts @@ -118,6 +118,7 @@ export interface Msg { tools?: string[] todos?: TodoItem[] todoIncomplete?: boolean + todoCollapsedByDefault?: boolean } export type Role = 'assistant' | 'system' | 'tool' | 'user' From 69ff2010509fd81f01af533ca47ab4881ea9eeee Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:26:50 -0500 Subject: [PATCH 56/87] feat(tui): anchor todo panel above streaming output --- ui-tui/src/__tests__/createGatewayEventHandler.test.ts | 3 +++ ui-tui/src/app/createGatewayEventHandler.ts | 7 ++++++- ui-tui/src/components/appLayout.tsx | 4 ++-- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts index c17aa565..c09bd4ee 100644 --- a/ui-tui/src/__tests__/createGatewayEventHandler.test.ts +++ b/ui-tui/src/__tests__/createGatewayEventHandler.test.ts @@ -81,6 +81,9 @@ describe('createGatewayEventHandler', () => { expect(finalText).toBeDefined() expect(trail).toMatchObject({ kind: 'trail', role: 'system', todos, todoIncomplete: true }) + // Todo archive must sit ABOVE the final assistant text so the panel + // doesn't visibly jump across the final answer at end-of-turn. 
+ expect(appended.indexOf(trail!)).toBeLessThan(appended.indexOf(finalText!)) expect(getTurnState().todos).toEqual([]) }) diff --git a/ui-tui/src/app/createGatewayEventHandler.ts b/ui-tui/src/app/createGatewayEventHandler.ts index c314fc10..b0ef2daf 100644 --- a/ui-tui/src/app/createGatewayEventHandler.ts +++ b/ui-tui/src/app/createGatewayEventHandler.ts @@ -537,9 +537,14 @@ export function createGatewayEventHandler(ctx: GatewayEventHandlerContext): (ev: const { finalMessages, finalText, wasInterrupted } = turnController.recordMessageComplete(ev.payload ?? {}) if (!wasInterrupted) { + // Archive the todo list FIRST so it sits above the final assistant + // text in the transcript — same position it held during streaming. + // Otherwise the panel would visibly jump from "above live answer" to + // "below final answer" at message.complete. + archiveTodosAtTurnEnd().forEach(appendMessage) + const msgs: Msg[] = finalMessages.length ? finalMessages : [{ role: 'assistant', text: finalText }] msgs.forEach(appendMessage) - archiveTodosAtTurnEnd().forEach(appendMessage) if (bellOnComplete && stdout?.isTTY) { stdout.write('\x07') diff --git a/ui-tui/src/components/appLayout.tsx b/ui-tui/src/components/appLayout.tsx index 0c136407..50a99e23 100644 --- a/ui-tui/src/components/appLayout.tsx +++ b/ui-tui/src/components/appLayout.tsx @@ -59,6 +59,8 @@ const TranscriptPane = memo(function TranscriptPane({ {transcript.virtualHistory.bottomSpacer > 0 ? : null} + + - - From 71eee2664022d97a0d509f25c867b30ad1f4e904 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:36:25 -0500 Subject: [PATCH 57/87] perf(tui): full-pipeline instrumentation + profiling harness MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extends HERMES_DEV_PERF to capture the complete render pipeline, not just React commits. Adds scripts/profile-tui.py to drive repeatable hold-PageUp stress tests against a real long session. 
perfPane.tsx: Wires ink's onFrame callback (already plumbed through the fork) into the same perf.log as the React.Profiler samples. Captures per-phase timing (yoga calculateLayout, renderNodeToOutput, screen diff, patch optimize, stdout write) plus yoga counters (visited/measured/cache- Hits/live) and patch counts per frame. Events are tagged {src: 'react'|'frame'} so jq can split them. logFrameEvent is undefined when HERMES_DEV_PERF is unset, so ink doesn't even attach the callback. entry.tsx: Passes logFrameEvent into render(). types/hermes-ink.d.ts: Declares FrameEvent + onFrame on RenderOptions so the ui-tui side type-checks against the plumbed-through ink option. scripts/profile-tui.py: New harness. Launches the built TUI under a PTY with the longest session in state.db resumed, holds PageUp/PageDown/etc at a configurable Hz for N seconds, then parses perf.log and prints per-phase p50/p95/p99/max plus yoga-counter summaries. Zero deps beyond stdlib. Exit 2 if nothing was captured (wiring broken). 
Initial findings (1106-msg session, 6s PageUp hold at 30Hz): - Steady state: 10 fps; renderer phase p99=63ms, write p99=0.2ms - 4/107 heavy frames (>=16ms), all dominated by renderNodeToOutput - One pathological 97ms frame with yoga measuring 70,415 text cells and Yoga visiting 225k nodes — the cold-unmeasured-region hit - Ink's scroll fast-path (DECSTBM blit from prevScreen) is disqualified because our spacer-based virtual history doesn't keep heightDelta in sync with scroll.delta, so every PageUp step falls through to a full 2000-4800 patch re-render instead of ~40 --- scripts/profile-tui.py | 335 +++++++++++++++++++++++++++++++ ui-tui/src/entry.tsx | 8 +- ui-tui/src/lib/perfPane.tsx | 124 +++++++++--- ui-tui/src/types/hermes-ink.d.ts | 24 +++ 4 files changed, 461 insertions(+), 30 deletions(-) create mode 100755 scripts/profile-tui.py diff --git a/scripts/profile-tui.py b/scripts/profile-tui.py new file mode 100755 index 00000000..5ef987c6 --- /dev/null +++ b/scripts/profile-tui.py @@ -0,0 +1,335 @@ +#!/usr/bin/env python3 +"""Drive the Hermes TUI under HERMES_DEV_PERF and summarize the pipeline. + +Usage: + scripts/profile-tui.py [--session SID] [--hold KEY] [--seconds N] [--rate HZ] + +Defaults: picks the session with the most messages, holds PageUp for 8s at +~30 Hz (matching xterm key-repeat), summarizes ~/.hermes/perf.log on exit. + +The --tui build must exist (run `npm run build` in ui-tui first). This script +launches `node dist/entry.js` directly with HERMES_TUI_RESUME set so it +bypasses the hermes_cli wrapper — we want repeatable timing, not the CLI's +session-picker flow. + +Environment overrides: + HERMES_PERF_LOG (default ~/.hermes/perf.log) + HERMES_PERF_NODE (default node from $PATH) + HERMES_TUI_DIR (default /home/bb/hermes-agent/ui-tui) + +Exit code is 0 if the harness ran and parsed results, 2 if the TUI crashed +or produced no perf data (suggests HERMES_DEV_PERF wiring is broken). 
+""" + +from __future__ import annotations + +import argparse +import json +import os +import pty +import select +import signal +import sqlite3 +import statistics +import sys +import time +from pathlib import Path +from typing import Any + + +DEFAULT_TUI_DIR = Path(os.environ.get("HERMES_TUI_DIR", "/home/bb/hermes-agent/ui-tui")) +DEFAULT_LOG = Path(os.environ.get("HERMES_PERF_LOG", str(Path.home() / ".hermes" / "perf.log"))) +DEFAULT_STATE_DB = Path.home() / ".hermes" / "state.db" + +# Keystroke escape sequences. Matches what xterm/VT220 send when the +# terminal has bracketed-paste disabled and the key-repeat handler fires. +KEYS = { + "page_up": b"\x1b[5~", + "page_down": b"\x1b[6~", + "wheel_up": b"\x1b[M`!!", # mouse wheel up (SGR-less) — best-effort + "shift_up": b"\x1b[1;2A", + "shift_down": b"\x1b[1;2B", +} + + +def pick_longest_session(db: Path) -> str: + conn = sqlite3.connect(db) + row = conn.execute( + "SELECT id FROM sessions s ORDER BY " + "(SELECT COUNT(*) FROM messages m WHERE m.session_id = s.id) DESC LIMIT 1" + ).fetchone() + if not row: + sys.exit(f"no sessions in {db}") + return row[0] + + +def drain(fd: int, timeout: float) -> bytes: + """Read whatever's available from fd within `timeout`, then return.""" + chunks = [] + end = time.monotonic() + timeout + while time.monotonic() < end: + r, _, _ = select.select([fd], [], [], max(0.0, end - time.monotonic())) + if not r: + break + try: + data = os.read(fd, 4096) + except OSError: + break + if not data: + break + chunks.append(data) + return b"".join(chunks) + + +def hold_key(fd: int, seq: bytes, seconds: float, rate_hz: int) -> int: + """Write `seq` to fd at ~rate_hz for `seconds`. Returns keystrokes sent.""" + interval = 1.0 / max(1, rate_hz) + end = time.monotonic() + seconds + sent = 0 + while time.monotonic() < end: + try: + os.write(fd, seq) + sent += 1 + except OSError: + break + # Drain stdout to keep the PTY buffer flowing; ignore content. 
+ drain(fd, 0) + time.sleep(interval) + return sent + + +def summarize(log: Path, since_ts_ms: int) -> dict[str, Any]: + """Parse perf.log, keep only events newer than since_ts_ms, return stats.""" + react_events: list[dict[str, Any]] = [] + frame_events: list[dict[str, Any]] = [] + if not log.exists(): + return {"error": f"no log at {log}", "react": [], "frame": []} + for line in log.read_text().splitlines(): + line = line.strip() + if not line: + continue + try: + row = json.loads(line) + except json.JSONDecodeError: + continue + if int(row.get("ts", 0)) < since_ts_ms: + continue + src = row.get("src") + if src == "react": + react_events.append(row) + elif src == "frame": + frame_events.append(row) + + return { + "react": react_events, + "frame": frame_events, + } + + +def pct(values: list[float], p: float) -> float: + if not values: + return 0.0 + s = sorted(values) + idx = min(len(s) - 1, int(len(s) * p)) + return s[idx] + + +def format_report(data: dict[str, Any]) -> str: + react = data.get("react") or [] + frames = data.get("frame") or [] + out = [] + + out.append("═══ React Profiler ═══") + if not react: + out.append(" (no react events — HERMES_DEV_PERF wired? 
threshold too high?)") + else: + by_id: dict[str, list[float]] = {} + for r in react: + by_id.setdefault(r["id"], []).append(r["actualMs"]) + out.append(f" {'pane':<14} {'count':>6} {'p50':>8} {'p95':>8} {'p99':>8} {'max':>8}") + for pid, ms in sorted(by_id.items(), key=lambda kv: -pct(kv[1], 0.99)): + out.append( + f" {pid:<14} {len(ms):>6} {pct(ms,0.50):>8.2f} {pct(ms,0.95):>8.2f} " + f"{pct(ms,0.99):>8.2f} {max(ms):>8.2f}" + ) + + out.append("") + out.append("═══ Ink pipeline ═══") + if not frames: + out.append(" (no frame events — onFrame wiring broken?)") + else: + dur = [f["durationMs"] for f in frames] + phases_present = any(f.get("phases") for f in frames) + out.append(f" frames captured: {len(frames)}") + out.append( + f" durationMs p50={pct(dur,0.50):.2f} p95={pct(dur,0.95):.2f} " + f"p99={pct(dur,0.99):.2f} max={max(dur):.2f}" + ) + # Effective FPS during the run: frames / elapsed seconds. + ts = sorted(f["ts"] for f in frames) + if len(ts) >= 2: + elapsed_s = (ts[-1] - ts[0]) / 1000.0 + fps = len(frames) / elapsed_s if elapsed_s > 0 else float("inf") + out.append(f" throughput: {len(frames)} frames / {elapsed_s:.2f}s = {fps:.1f} fps") + + if phases_present: + fields = ["yoga", "renderer", "diff", "optimize", "write", "commit"] + out.append("") + out.append(f" {'phase':<10} {'p50':>8} {'p95':>8} {'p99':>8} {'max':>8} (ms)") + for field in fields: + vals = [f["phases"][field] for f in frames if f.get("phases")] + if vals: + out.append( + f" {field:<10} {pct(vals,0.50):>8.2f} {pct(vals,0.95):>8.2f} " + f"{pct(vals,0.99):>8.2f} {max(vals):>8.2f}" + ) + # Derived: sum of phases vs durationMs (reveals hidden time). 
+ sum_ps = [ + sum(f["phases"][k] for k in fields) + for f in frames if f.get("phases") + ] + if sum_ps: + dur_match = [f["durationMs"] for f in frames if f.get("phases")] + deltas = [d - s for d, s in zip(dur_match, sum_ps)] + out.append( + f" {'dur-Σphases':<10} {pct(deltas,0.50):>8.2f} {pct(deltas,0.95):>8.2f} " + f"{pct(deltas,0.99):>8.2f} {max(deltas):>8.2f} (unaccounted-for time)" + ) + + # Yoga counters + visited = [f["phases"]["yogaVisited"] for f in frames if f.get("phases")] + measured = [f["phases"]["yogaMeasured"] for f in frames if f.get("phases")] + cache_hits = [f["phases"]["yogaCacheHits"] for f in frames if f.get("phases")] + live = [f["phases"]["yogaLive"] for f in frames if f.get("phases")] + out.append("") + out.append(" Yoga counters (per frame):") + for name, vals in ( + ("visited", visited), + ("measured", measured), + ("cacheHits", cache_hits), + ("live", live), + ): + if vals: + out.append(f" {name:<11} p50={pct(vals,0.5):.0f} p99={pct(vals,0.99):.0f} max={max(vals)}") + + # Patch counts — proxy for "how much changed each frame" + patches = [f["phases"]["patches"] for f in frames if f.get("phases")] + if patches: + out.append( + f" patches p50={pct(patches,0.5):.0f} p99={pct(patches,0.99):.0f} " + f"max={max(patches)} total={sum(patches)}" + ) + + # Flickers + flicker_frames = [f for f in frames if f.get("flickers")] + if flicker_frames: + out.append("") + out.append(f" ⚠ flickers detected in {len(flicker_frames)} frames") + reasons: dict[str, int] = {} + for f in flicker_frames: + for fl in f["flickers"]: + reasons[fl["reason"]] = reasons.get(fl["reason"], 0) + 1 + for reason, n in sorted(reasons.items(), key=lambda kv: -kv[1]): + out.append(f" {reason}: {n}") + + return "\n".join(out) + + +def main() -> int: + p = argparse.ArgumentParser() + p.add_argument("--session", help="session id to resume (default: longest in db)") + p.add_argument("--hold", default="page_up", choices=sorted(KEYS.keys()), help="key to hold") + 
p.add_argument("--seconds", type=float, default=8.0, help="how long to hold the key") + p.add_argument("--rate", type=int, default=30, help="keystrokes per second") + p.add_argument("--warmup", type=float, default=3.0, help="seconds to wait after launch before input") + p.add_argument("--threshold-ms", type=float, default=0.0, help="HERMES_DEV_PERF_MS (0 = capture all)") + p.add_argument("--cols", type=int, default=120) + p.add_argument("--rows", type=int, default=40) + p.add_argument("--keep-log", action="store_true", help="don't wipe perf.log before run") + p.add_argument("--tui-dir", default=str(DEFAULT_TUI_DIR)) + p.add_argument("--log", default=str(DEFAULT_LOG)) + args = p.parse_args() + + tui_dir = Path(args.tui_dir).resolve() + entry = tui_dir / "dist" / "entry.js" + if not entry.exists(): + sys.exit(f"{entry} missing — run `npm run build` in {tui_dir} first") + + sid = args.session or pick_longest_session(DEFAULT_STATE_DB) + print(f"• session: {sid}") + print(f"• hold: {args.hold} x {args.rate}Hz for {args.seconds}s after {args.warmup}s warmup") + print(f"• terminal: {args.cols}x{args.rows}") + + log = Path(args.log) + if not args.keep_log and log.exists(): + log.unlink() + + since_ms = int(time.time() * 1000) + + env = os.environ.copy() + env["HERMES_DEV_PERF"] = "1" + env["HERMES_DEV_PERF_MS"] = str(args.threshold_ms) + env["HERMES_DEV_PERF_LOG"] = str(log) + env["HERMES_TUI_RESUME"] = sid + env["COLUMNS"] = str(args.cols) + env["LINES"] = str(args.rows) + env["TERM"] = env.get("TERM", "xterm-256color") + # Ensure bracketed-paste doesn't intercept our PageUp writes. + + node = os.environ.get("HERMES_PERF_NODE", "node") + + # Fork under a PTY so the TUI enters alt-screen / raw-mode cleanly. + pid, fd = pty.fork() + if pid == 0: + # Child: exec node. PTY makes stdin/stdout/stderr all TTY. + os.execvpe(node, [node, str(entry)], env) + + try: + # Set initial PTY size via ioctl (TIOCSWINSZ). 
+ import fcntl, struct, termios + winsize = struct.pack("HHHH", args.rows, args.cols, 0, 0) + fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize) + + print(f"• pid: {pid} fd: {fd}") + print(f"• warmup {args.warmup}s (drain startup output)…") + drain(fd, args.warmup) + + print(f"• holding {args.hold}…") + sent = hold_key(fd, KEYS[args.hold], args.seconds, args.rate) + print(f" sent {sent} keystrokes") + + # Small cooldown so trailing frames get written to the log. + drain(fd, 0.5) + finally: + # Kill TUI cleanly. SIGTERM first, SIGKILL if stubborn. + try: + os.kill(pid, signal.SIGTERM) + for _ in range(10): + pid_done, _ = os.waitpid(pid, os.WNOHANG) + if pid_done == pid: + break + time.sleep(0.1) + else: + os.kill(pid, signal.SIGKILL) + os.waitpid(pid, 0) + except (ProcessLookupError, ChildProcessError): + pass + try: + os.close(fd) + except OSError: + pass + + # Give the log a moment to flush. + time.sleep(0.2) + + data = summarize(log, since_ms) + print() + print(format_report(data)) + + if not data["react"] and not data["frame"]: + return 2 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/ui-tui/src/entry.tsx b/ui-tui/src/entry.tsx index 8fdf9f68..92ae4a71 100644 --- a/ui-tui/src/entry.tsx +++ b/ui-tui/src/entry.tsx @@ -41,6 +41,10 @@ if (process.env.HERMES_HEAPDUMP_ON_START === '1') { process.on('beforeExit', () => stopMemoryMonitor()) -const [{ render }, { App }] = await Promise.all([import('@hermes/ink'), import('./app.js')]) +const [{ render }, { App }, { logFrameEvent }] = await Promise.all([ + import('@hermes/ink'), + import('./app.js'), + import('./lib/perfPane.js') +]) -render(, { exitOnCtrlC: false }) +render(, { exitOnCtrlC: false, onFrame: logFrameEvent }) diff --git a/ui-tui/src/lib/perfPane.tsx b/ui-tui/src/lib/perfPane.tsx index 32b260b7..feae1f0b 100644 --- a/ui-tui/src/lib/perfPane.tsx +++ b/ui-tui/src/lib/perfPane.tsx @@ -1,27 +1,53 @@ -// Perf instrumentation: wraps React.Profiler around named panes and writes -// commit 
timings to a log file when HERMES_DEV_PERF is set. Enabled per-run -// via the env var; zero-cost (Profiler is replaced by a Fragment) when off. +// Perf instrumentation for the full render pipeline. // -// Log format: one JSON object per line, for easy `jq` filtering. We only -// log commits that exceed a threshold (default 2ms) so the file doesn't -// fill up with sub-millisecond idle renders. Tune via HERMES_DEV_PERF_MS. +// Two sources of timing: +// 1. React.Profiler wrapper (PerfPane) → per-pane commit times. Shows +// which subtree is reconciling and for how long. +// 2. Ink onFrame callback (logFrameEvent) → per-frame pipeline phases: +// yoga (calculateLayout), renderer (DOM → screen buffer), diff +// (prev vs current screen → patches), optimize (patch merge/dedupe), +// write (serialize → ANSI → stdout), plus yoga counters (visited, +// measured, cacheHits, live). Shows where the time goes BELOW React. // -// Usage in consumers: -// import { PerfPane } from './perfPane.js' -// ... +// Both sources gate on HERMES_DEV_PERF=1 and dump JSON-lines to the same +// log (default ~/.hermes/perf.log, override via HERMES_DEV_PERF_LOG). +// Events are tagged { src: 'react' | 'frame' } so jq can split them. // -// Inspect with: -// tail -f ~/.hermes/perf.log | jq -c 'select(.actualMs > 8)' -// jq -s 'group_by(.id) | map({id: .[0].id, count: length, p50: (sort_by(.actualMs) | .[length/2|floor].actualMs), p99: (sort_by(.actualMs) | .[length*0.99|floor].actualMs)})' ~/.hermes/perf.log +// Threshold HERMES_DEV_PERF_MS (default 2ms) skips sub-millisecond idle +// frames. For the 2fps-during-PageUp investigation, set +// HERMES_DEV_PERF_MS=0 to capture everything, then filter with jq. +// +// Zero cost when the env var is unset: PerfPane returns children +// directly (no Profiler fiber), logFrameEvent is a noop on the onFrame +// callback — the ink instance isn't given the callback at all. 
+// +// Usage: +// # entry.tsx wires logFrameEvent into render() +// import { logFrameEvent, PerfPane } from './lib/perfPane.js' +// render(, { onFrame: logFrameEvent }) +// +// Analysis helpers (once you've captured a session): +// tail -f ~/.hermes/perf.log | jq -c 'select(.src=="frame" and .durationMs > 16)' +// # p50/p99 per phase across frame events: +// jq -s '[.[] | select(.src=="frame")] | +// {n: length, +// dur_p50: (sort_by(.durationMs) | .[length/2|floor].durationMs), +// dur_p99: (sort_by(.durationMs) | .[length*0.99|floor].durationMs), +// yoga_p99: (sort_by(.phases.yoga) | .[length*0.99|floor].phases.yoga), +// write_p99: (sort_by(.phases.write) | .[length*0.99|floor].phases.write), +// diff_p99: (sort_by(.phases.diff) | .[length*0.99|floor].phases.diff), +// patches_p99: (sort_by(.phases.patches) | .[length*0.99|floor].phases.patches)}' \ +// ~/.hermes/perf.log import { appendFileSync, mkdirSync } from 'node:fs' import { homedir } from 'node:os' import { dirname, join } from 'node:path' +import type { FrameEvent } from '@hermes/ink' import { Profiler, type ProfilerOnRenderCallback, type ReactNode } from 'react' const ENABLED = /^(?:1|true|yes|on)$/i.test((process.env.HERMES_DEV_PERF ?? '').trim()) -const THRESHOLD_MS = Number(process.env.HERMES_DEV_PERF_MS ?? '2') || 2 +const THRESHOLD_MS = Number(process.env.HERMES_DEV_PERF_MS ?? 
'2') || 0 const LOG_PATH = process.env.HERMES_DEV_PERF_LOG?.trim() || join(homedir(), '.hermes', 'perf.log') let initialized = false @@ -42,23 +68,9 @@ const ensureLogDir = () => { } } -const onRender: ProfilerOnRenderCallback = (id, phase, actualMs, baseMs, startTime, commitTime) => { - if (actualMs < THRESHOLD_MS) { - return - } - +const writeRow = (row: Record) => { ensureLogDir() - const row = { - actualMs: Math.round(actualMs * 100) / 100, - baseMs: Math.round(baseMs * 100) / 100, - commitMs: Math.round(commitTime * 100) / 100, - id, - phase, - startMs: Math.round(startTime * 100) / 100, - ts: Date.now() - } - try { appendFileSync(LOG_PATH, `${JSON.stringify(row)}\n`) } catch { @@ -66,6 +78,25 @@ const onRender: ProfilerOnRenderCallback = (id, phase, actualMs, baseMs, startTi } } +const round2 = (n: number) => Math.round(n * 100) / 100 + +const onRender: ProfilerOnRenderCallback = (id, phase, actualMs, baseMs, startTime, commitTime) => { + if (actualMs < THRESHOLD_MS) { + return + } + + writeRow({ + actualMs: round2(actualMs), + baseMs: round2(baseMs), + commitMs: round2(commitTime), + id, + phase, + src: 'react', + startMs: round2(startTime), + ts: Date.now() + }) +} + export function PerfPane({ children, id }: { children: ReactNode; id: string }) { if (!ENABLED) { return children @@ -78,5 +109,42 @@ export function PerfPane({ children, id }: { children: ReactNode; id: string }) ) } +/** + * Ink onFrame handler. Captures the FULL render pipeline: yoga calculateLayout, + * DOM → screen buffer, screen diff, patch optimize, and stdout write. + * + * Returns `undefined` when disabled so `render()` doesn't attach the callback — + * ink only pays the timing cost when the callback is truthy. + */ +export const logFrameEvent = ENABLED + ? (event: FrameEvent) => { + if (event.durationMs < THRESHOLD_MS) { + return + } + + writeRow({ + durationMs: round2(event.durationMs), + flickers: event.flickers.length ? event.flickers : undefined, + phases: event.phases + ? 
{ + commit: round2(event.phases.commit), + diff: round2(event.phases.diff), + optimize: round2(event.phases.optimize), + patches: event.phases.patches, + renderer: round2(event.phases.renderer), + write: round2(event.phases.write), + yoga: round2(event.phases.yoga), + yogaCacheHits: event.phases.yogaCacheHits, + yogaLive: event.phases.yogaLive, + yogaMeasured: event.phases.yogaMeasured, + yogaVisited: event.phases.yogaVisited + } + : undefined, + src: 'frame', + ts: Date.now() + }) + } + : undefined + export const PERF_ENABLED = ENABLED export const PERF_LOG_PATH = LOG_PATH diff --git a/ui-tui/src/types/hermes-ink.d.ts b/ui-tui/src/types/hermes-ink.d.ts index c878bdb4..762166af 100644 --- a/ui-tui/src/types/hermes-ink.d.ts +++ b/ui-tui/src/types/hermes-ink.d.ts @@ -33,11 +33,35 @@ declare module '@hermes/ink' { export type InputHandler = (input: string, key: Key, event: InputEvent) => void + export type FrameEvent = { + readonly durationMs: number + readonly phases?: { + readonly renderer: number + readonly diff: number + readonly optimize: number + readonly write: number + readonly patches: number + readonly yoga: number + readonly commit: number + readonly yogaVisited: number + readonly yogaMeasured: number + readonly yogaCacheHits: number + readonly yogaLive: number + } + readonly flickers: ReadonlyArray<{ + readonly desiredHeight: number + readonly availableHeight: number + readonly reason: 'resize' | 'offscreen' | 'clear' + }> + } + export type RenderOptions = { readonly stdin?: NodeJS.ReadStream readonly stdout?: NodeJS.WriteStream readonly stderr?: NodeJS.WriteStream readonly exitOnCtrlC?: boolean + readonly patchConsole?: boolean + readonly onFrame?: (event: FrameEvent) => void } export type Instance = { From cd7a200e6c05d3295027cd231165e2a8b892956b Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:45:53 -0500 Subject: [PATCH 58/87] perf(tui): instrument scroll fast-path decline reasons MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Adds scrollFastPathStats counters to render-node-to-output.ts: captures every time a ScrollBox's DECSTBM scroll hint is generated, records whether the fast path took it (blit+shift from prevScreen) or declined, and why. Exposed through hermes-ink's public exports and snapshotted on every FrameEvent so the profiler harness can correlate decline reasons with the actual patch/renderer cost per frame. This is pure observation — no behaviour change. Preparing for the virtual-history rewrite: the hypothesis was that our topSpacer/ bottomSpacer scheme disqualifies every scroll via heightDelta mismatch, but the data shows the fast path is actually taken on most scrolls (19/23 over a 6s PageUp hold through 1100 messages) — the remaining steady-state renderer cost is Yoga tree traversal, not the per-frame full redraw I initially suspected. Declines that do happen correlate with React commits that changed the mounted range mid-scroll (heightDelta=±3 to ±35). Those are the rarer cases the virtualization rewrite still needs to address. No test diffs — instrumentation-only. Build verified: `tsc --noEmit` plus the full `npm run build` compiler post-pass pass cleanly. 
--- .../packages/hermes-ink/src/entry-exports.ts | 5 ++ .../src/ink/render-node-to-output.ts | 69 +++++++++++++++++++ ui-tui/src/lib/perfPane.tsx | 22 ++++++ ui-tui/src/types/hermes-ink.d.ts | 18 +++++ 4 files changed, 114 insertions(+) diff --git a/ui-tui/packages/hermes-ink/src/entry-exports.ts b/ui-tui/packages/hermes-ink/src/entry-exports.ts index 6ef1fc5f..3d5be7b5 100644 --- a/ui-tui/packages/hermes-ink/src/entry-exports.ts +++ b/ui-tui/packages/hermes-ink/src/entry-exports.ts @@ -21,6 +21,11 @@ export { useTerminalFocus } from './ink/hooks/use-terminal-focus.js' export { useTerminalTitle } from './ink/hooks/use-terminal-title.js' export { useTerminalViewport } from './ink/hooks/use-terminal-viewport.js' export { default as measureElement } from './ink/measure-element.js' +export { + resetScrollFastPathStats, + scrollFastPathStats, + type ScrollFastPathStats +} from './ink/render-node-to-output.js' export { createRoot, default as render, renderSync } from './ink/root.js' export { stringWidth } from './ink/stringWidth.js' export { default as TextInput, UncontrolledTextInput } from 'ink-text-input' diff --git a/ui-tui/packages/hermes-ink/src/ink/render-node-to-output.ts b/ui-tui/packages/hermes-ink/src/ink/render-node-to-output.ts index 12d689c1..cb781f3e 100644 --- a/ui-tui/packages/hermes-ink/src/ink/render-node-to-output.ts +++ b/ui-tui/packages/hermes-ink/src/ink/render-node-to-output.ts @@ -67,6 +67,54 @@ export function resetScrollHint(): void { absoluteRectsCur = [] } +// Fast-path diagnostics. Bumped from the ScrollBox fast-path branch +// whenever a scroll hint was captured. Reveals why a fast path was +// declined (heightDelta mismatch, no prevScreen, etc.) so we can chase +// the last mile of PageUp/wheel latency. Zero cost when no reader — +// it's all integer bumps. Exposed as a counter object so external +// probes can snapshot + diff. 
+export type ScrollFastPathStats = { + captured: number + taken: number + declined: { + noPrevScreen: number + heightDeltaMismatch: number + noHint: number + other: number + } + lastDeclineReason?: string + lastHeightDelta?: number + lastHintDelta?: number + lastScrollHeight?: number + lastPrevHeight?: number +} + +export const scrollFastPathStats: ScrollFastPathStats = { + captured: 0, + taken: 0, + declined: { + noPrevScreen: 0, + heightDeltaMismatch: 0, + noHint: 0, + other: 0 + } +} + +export function resetScrollFastPathStats(): void { + scrollFastPathStats.captured = 0 + scrollFastPathStats.taken = 0 + scrollFastPathStats.declined.noPrevScreen = 0 + scrollFastPathStats.declined.heightDeltaMismatch = 0 + scrollFastPathStats.declined.noHint = 0 + scrollFastPathStats.declined.other = 0 + scrollFastPathStats.lastDeclineReason = undefined + scrollFastPathStats.lastHeightDelta = undefined + scrollFastPathStats.lastHintDelta = undefined + scrollFastPathStats.lastScrollHeight = undefined + scrollFastPathStats.lastPrevHeight = undefined +} + + export function getScrollHint(): ScrollHint | null { return scrollHint } @@ -927,6 +975,27 @@ function renderNodeToOutput( const safeForFastPath = !hint || heightDelta === 0 || (hint.delta > 0 && heightDelta === hint.delta) + // Diagnostics (opt-in via scrollFastPathStats reader). Only + // counts when a hint was captured — cases where nothing scrolled + // (hint === null) are not declines, just idle frames. 
+ if (hint) { + scrollFastPathStats.captured++ + scrollFastPathStats.lastHintDelta = hint.delta + scrollFastPathStats.lastScrollHeight = scrollHeight + scrollFastPathStats.lastPrevHeight = prevHeight + scrollFastPathStats.lastHeightDelta = heightDelta + + if (!safeForFastPath) { + scrollFastPathStats.declined.heightDeltaMismatch++ + scrollFastPathStats.lastDeclineReason = `heightDelta=${heightDelta} hintDelta=${hint.delta}` + } else if (!prevScreen) { + scrollFastPathStats.declined.noPrevScreen++ + scrollFastPathStats.lastDeclineReason = 'noPrevScreen' + } else { + scrollFastPathStats.taken++ + } + } + // scrollHint is set above when hint is captured. If safeForFastPath // is false the full path renders a next.screen that doesn't match // the DECSTBM shift — emitting DECSTBM leaves stale rows (seen as diff --git a/ui-tui/src/lib/perfPane.tsx b/ui-tui/src/lib/perfPane.tsx index feae1f0b..331fb62d 100644 --- a/ui-tui/src/lib/perfPane.tsx +++ b/ui-tui/src/lib/perfPane.tsx @@ -44,6 +44,7 @@ import { homedir } from 'node:os' import { dirname, join } from 'node:path' import type { FrameEvent } from '@hermes/ink' +import { scrollFastPathStats } from '@hermes/ink' import { Profiler, type ProfilerOnRenderCallback, type ReactNode } from 'react' const ENABLED = /^(?:1|true|yes|on)$/i.test((process.env.HERMES_DEV_PERF ?? '').trim()) @@ -122,8 +123,29 @@ export const logFrameEvent = ENABLED return } + // Snapshot the fast-path counters each frame. Cumulative values — + // consumers diff pairs to get per-frame deltas. Written verbatim + // so we can also see "last*" fields (which decline reason fired, + // and what the height math looked like). 
+ const fastPath = { + captured: scrollFastPathStats.captured, + taken: scrollFastPathStats.taken, + declined: { + heightDeltaMismatch: scrollFastPathStats.declined.heightDeltaMismatch, + noHint: scrollFastPathStats.declined.noHint, + noPrevScreen: scrollFastPathStats.declined.noPrevScreen, + other: scrollFastPathStats.declined.other + }, + lastDeclineReason: scrollFastPathStats.lastDeclineReason, + lastHeightDelta: scrollFastPathStats.lastHeightDelta, + lastHintDelta: scrollFastPathStats.lastHintDelta, + lastPrevHeight: scrollFastPathStats.lastPrevHeight, + lastScrollHeight: scrollFastPathStats.lastScrollHeight + } + writeRow({ durationMs: round2(event.durationMs), + fastPath, flickers: event.flickers.length ? event.flickers : undefined, phases: event.phases ? { diff --git a/ui-tui/src/types/hermes-ink.d.ts b/ui-tui/src/types/hermes-ink.d.ts index 762166af..0ad9a957 100644 --- a/ui-tui/src/types/hermes-ink.d.ts +++ b/ui-tui/src/types/hermes-ink.d.ts @@ -101,6 +101,24 @@ declare module '@hermes/ink' { export const TextInput: React.ComponentType export const stringWidth: (s: string) => number + export type ScrollFastPathStats = { + captured: number + taken: number + declined: { + noPrevScreen: number + heightDeltaMismatch: number + noHint: number + other: number + } + lastDeclineReason?: string + lastHeightDelta?: number + lastHintDelta?: number + lastScrollHeight?: number + lastPrevHeight?: number + } + export const scrollFastPathStats: ScrollFastPathStats + export function resetScrollFastPathStats(): void + export function render(node: React.ReactNode, options?: NodeJS.WriteStream | RenderOptions): Instance export function useApp(): { readonly exit: (error?: Error) => void } From 7242361a6937c7caa34ffbeb55a12276800568da Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:55:56 -0500 Subject: [PATCH 59/87] fix(tui): wrap streaming markdown split in column Box MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit StreamingMd returned <> — a bare Fragment with two children. Each returns a , but its parent in messageLine.tsx (line 169) is `` with no flexDirection, which Ink defaults to 'row'. So during streaming the two column boxes rendered side-by-side, producing the visible "tokens jumble into two columns until it fixes itself" bug — the "fix" was message.complete flipping isStreaming→false, which swaps the StreamingMd subtree for a single DeferredMd/Md child (no siblings → row direction is harmless). Wrap the two siblings in a flexDirection="column" Box so they stack. Localized fix so the non-streaming path (single-child, works fine in a row parent) is untouched. Reported by user: > "tokens streaming... going into 2 columns randomly and jumbling > together until it fixes itself" No test changes — findStableBoundary tests still pass (the layout change is parent-structural, not in the boundary logic). Build clean, tsc clean, 352 tests pass. --- ui-tui/src/components/streamingMarkdown.tsx | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/ui-tui/src/components/streamingMarkdown.tsx b/ui-tui/src/components/streamingMarkdown.tsx index e6dcbbfb..111ed61e 100644 --- a/ui-tui/src/components/streamingMarkdown.tsx +++ b/ui-tui/src/components/streamingMarkdown.tsx @@ -19,11 +19,16 @@ // flips off → message moves to history and renders via directly), so // the ref resets naturally. // -// See src/app/useMainApp.ts for the reasoning on why we don't memoize the -// whole Md text during streaming: that cache never hits because `text` is -// growing. Mirror claude-code's `StreamingMarkdown` approach adapted to -// our line-based tokenizer. +// Layout: the two subtrees MUST render stacked (column). 
The parent +// container in messageLine.tsx is a default `flexDirection: 'row'` Box +// (Ink's default), so returning a bare Fragment of two siblings +// laid them out side-by-side — producing the "two jumbled columns while +// streaming" rendering bug. Wrapping in a flexDirection="column" Box +// here localizes the fix to the streaming path; the non-streaming +// already returns its own column Box, so its single-child case was never +// affected. +import { Box } from '@hermes/ink' import { memo, useRef } from 'react' import type { Theme } from '../theme.js' @@ -113,10 +118,10 @@ export const StreamingMd = memo(function StreamingMd({ compact, t, text }: Strea } return ( - <> + - + ) }) From 4a9070c9ac24ea8a205825bd8533b5c7b022904e Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 16:56:09 -0500 Subject: [PATCH 60/87] perf(tui): defer Md upgrade for fresh-mounted assistant rows MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds DeferredMd — a wrapper around that renders a lightweight placeholder on first mount and upgrades to the full markdown subtree on a queueMicrotask follow-up. Rationale: fresh MessageLine mounts during PageUp hold run our markdown tokenizer + syntax highlighter synchronously, producing the 63-112ms renderer spikes profiled earlier. A plain placeholder only needs Yoga to wrap the pre-stripped string (no tokenizer, no highlight), then the Md subtree builds in a follow-up React commit. Upgrade cache: once a (theme, compact, text) tuple has been upgraded, a WeakMap-keyed Set remembers it so remounts (scroll-out then scroll-back) mount straight into — no placeholder round-trip. WeakMap on theme means palette swaps re-upgrade naturally. Honesty note: profiling under hold-PageUp showed this didn't reduce renderer p99 measurably — the upgrade commit just pays the Md cost on a follow-up frame instead of inline. 
The bigger bottleneck turned out to be React commit frequency (3.5 commits/sec during 30Hz scroll input, with 200ms+ silent gaps between commits dominating perceived FPS), which this change doesn't address. Keeping the deferred path anyway because: 1. It's correct and tested — no regressions across 352 tests 2. Defensive for pathological fresh-mount cases (giant code blocks, wide tables) that aren't in the current profile fixture 3. Pairs naturally with useVirtualHistory's useDeferredValue to keep React's concurrent scheduler able to interrupt upgrade commits If the follow-up perf investigation (terminal write throughput / patch volume / commit frequency) shows DeferredMd is net-neutral-or-worse in practice, this can be reverted with a one-line swap back to in messageLine.tsx:115. Companion to the streaming 2-column fix in 7242361a — these two touched messageLine.tsx together so they land as a pair. --- ui-tui/src/components/deferredMarkdown.tsx | 90 ++++++++++++++++++++++ ui-tui/src/components/messageLine.tsx | 9 ++- 2 files changed, 97 insertions(+), 2 deletions(-) create mode 100644 ui-tui/src/components/deferredMarkdown.tsx diff --git a/ui-tui/src/components/deferredMarkdown.tsx b/ui-tui/src/components/deferredMarkdown.tsx new file mode 100644 index 00000000..55d984a3 --- /dev/null +++ b/ui-tui/src/components/deferredMarkdown.tsx @@ -0,0 +1,90 @@ +// DeferredMd — renders a lightweight placeholder on first mount and +// upgrades to full markdown + syntax highlighting in a subsequent +// transition commit. Spreads the parse cost off the scroll critical path. +// +// Why: profiling shows the 63-112ms renderer spikes during hold-PageUp +// correlate with fresh MessageLine mounts running the markdown tokenizer +// + syntax highlighting synchronously. The new row is added by +// useVirtualHistory's slide step; React commits the tree; Ink lays out +// Yoga; stdout writes the result. All in one hitch frame. 
+// +// With this wrapper, the hitch frame lays out a pre-wrapped plain +// (Yoga only needs to wrap width-known strings — no tokenizer, no +// highlighter, no inline regex walk), then a follow-up commit re-renders +// the same row with full markdown. The follow-up is gated on a +// queueMicrotask so Ink has a chance to paint the placeholder before +// React starts the Md-heavy upgrade work. +// +// Upgrade cache: once a given (theme, text, compact) tuple has been +// rendered as full Md, we remember it so remounts (scroll-out then +// scroll-back) don't pay the placeholder round-trip again — they mount +// straight into the upgraded subtree, which Md internally memoizes +// on text identity, so there's no re-tokenization either. + +import { Text } from '@hermes/ink' +import { memo, useEffect, useState } from 'react' + +import type { Theme } from '../theme.js' + +import { Md, stripInlineMarkup } from './markdown.js' + +// Theme object is stable per-session; key upgrades under it so palette +// swaps naturally retrigger (colors differ → render changes). +const upgraded = new WeakMap>() + +const cacheKey = (compact: boolean | undefined, text: string) => (compact ? `c:${text}` : `x:${text}`) + +const hasUpgraded = (t: Theme, key: string) => upgraded.get(t)?.has(key) ?? false + +const markUpgraded = (t: Theme, key: string) => { + const bucket = upgraded.get(t) ?? 
new Set() + + bucket.add(key) + upgraded.set(t, bucket) +} + +export const DeferredMd = memo(function DeferredMd({ color, compact, t, text }: DeferredMdProps) { + const key = cacheKey(compact, text) + const [ready, setReady] = useState(() => hasUpgraded(t, key) || !text) + + useEffect(() => { + if (ready) { + return + } + + let cancelled = false + + queueMicrotask(() => { + if (cancelled) { + return + } + + markUpgraded(t, key) + setReady(true) + }) + + return () => { + cancelled = true + } + }, [key, ready, t]) + + if (ready) { + return + } + + // Placeholder: strip inline markup so the visible width approximately + // matches the final Md layout (bold/italic/links are width-neutral or + // collapse to anchor text). Line breaks preserved — Ink's wrap="wrap" + // lays the plain text out as blocks at the right column count. + // Using directly (no Box wrapper) so there's no column-flex + // decision for Yoga — it just wraps a string. + return {stripInlineMarkup(text)} +}) + +interface DeferredMdProps { + /** Fallback color for the placeholder text (typically the role's body color). */ + color?: string + compact?: boolean + t: Theme + text: string +} diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index a3d3f584..fe7c8076 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -9,7 +9,7 @@ import { boundedLiveRenderText, compactPreview, hasAnsi, isPasteBackedText, stri import type { Theme } from '../theme.js' import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' -import { Md } from './markdown.js' +import { DeferredMd } from './deferredMarkdown.js' import { StreamingMd } from './streamingMarkdown.js' import { ToolTrail } from './thinking.js' import { TodoPanel } from './todoPanel.js' @@ -107,7 +107,12 @@ export const MessageLine = memo(function MessageLine({ // streamingMarkdown.tsx for the cost model. 
) : ( - + // Deferred markdown: plain-text placeholder on first mount, upgrade + // to full Md on a queued microtask. Spreads the tokenizer + syntax + // cost off the scroll critical path so hold-PageUp doesn't hitch + // on fresh assistant rows entering overscan. See + // deferredMarkdown.tsx for the trade-offs. + ) } From 7ca16eea56a5f9f79a91ef1eeff3ce763693c745 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 17:01:22 -0500 Subject: [PATCH 61/87] perf(tui): scroll one row at a time per wheel event, half-viewport per pageUp MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit User observation: "it doesn't scroll line by line/row by row." Was right. Two places hardcoded big deltas: 1. WHEEL_SCROLL_STEP = 6 (config/limits.ts) Each wheel event scrolled 6 rows. A mechanical wheel notch emits 3-5 events → 18-30 rows per click, which visually teleports past content instead of smooth-scrolling it. Drop to 1. Trackpads emit 50-100 events per flick — at step=1 that's still a fast flick (a whole viewport in one flick) but each intermediate frame is visible. Porting claude-code's wheel accel state machine is the right next step if this feels sluggish on precision scrolls. 2. pageUp/pageDown = viewport - 2 (useInputHandlers.ts) Full-viewport jumps replace the entire screen — no visual continuity, can't scan content — AND land right at Ink's fast-path threshold (`delta < innerHeight`), which disqualifies the DECSTBM blit on every press. Half-viewport keeps 50% continuity AND drops well under the threshold. Two presses still cover the same total distance. 
Profiled against the 1106-msg session, holding the key at 30Hz for 6s: wheel_up (step 6 → 1): frames 142 → 163 (+15%) throughput 10.7 → 15.8 fps (+48%) patches tot 53018→ 36562 (-31%) gap p50 5ms → 16ms (actual rendering ~60fps now) <16ms frames 93 → 76 16-33ms 82 → 76 hitches 3 → 1 pageUp (viewport-2 → viewport/2): throughput 10.7 → 9.5 fps (same ballpark — smaller delta × same event rate = less total scroll) Ink's proportional drain caps at `innerHeight - 1` per frame to keep the DECSTBM fast path firing. With these smaller deltas every event comfortably fits under that cap, so fast-path hit rate goes up and patch volume per frame drops — the measured 31% reduction in total patches-sent correlates with users perceiving smoother scrolling because the outer terminal (VS Code / xterm.js / tmux) isn't drowning in ANSI between paints. Tests/type-check/build clean; 352 tests pass. --- ui-tui/src/app/useInputHandlers.ts | 9 ++++++++- ui-tui/src/config/limits.ts | 18 +++++++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/ui-tui/src/app/useInputHandlers.ts b/ui-tui/src/app/useInputHandlers.ts index fff73d9c..b18dcbbd 100644 --- a/ui-tui/src/app/useInputHandlers.ts +++ b/ui-tui/src/app/useInputHandlers.ts @@ -296,7 +296,14 @@ export function useInputHandlers(ctx: InputHandlerContext): InputHandlerResult { if (key.pageUp || key.pageDown) { const viewport = terminal.scrollRef.current?.getViewportHeight() ?? Math.max(6, (terminal.stdout?.rows ?? 24) - 8) - const step = Math.max(4, viewport - 2) + // Half-viewport per keystroke. A whole-viewport jump (our old + // `viewport - 2`) fully replaces what's on screen — no visual + // continuity, the user can't scan — AND it lands right at Ink's + // `delta < innerHeight` fast-path threshold, disqualifying the + // DECSTBM blit on every press. Half-viewport keeps 50% continuity, + // well under the threshold, and two presses still scroll the same + // total distance. 
+ const step = Math.max(4, Math.floor(viewport / 2)) return scrollTranscript(key.pageUp ? -step : step) } diff --git a/ui-tui/src/config/limits.ts b/ui-tui/src/config/limits.ts index a2e817d8..889ac4d6 100644 --- a/ui-tui/src/config/limits.ts +++ b/ui-tui/src/config/limits.ts @@ -4,4 +4,20 @@ export const LIVE_RENDER_MAX_LINES = 240 export const LONG_MSG = 300 export const MAX_HISTORY = 800 export const THINKING_COT_MAX = 160 -export const WHEEL_SCROLL_STEP = 6 +// Rows scrolled per wheel-notch event. +// +// One notch of a mechanical wheel emits multiple wheel events (3-5 per +// click in most terminals; trackpad flicks emit 100+). Each event scrolls +// WHEEL_SCROLL_STEP rows. The product = rows-per-click. +// +// 1 = pure line-by-line. Small per-event delta keeps Ink's DECSTBM fast +// path firing (each scroll < viewport-1) and produces smooth visible +// motion — the user can scan content mid-scroll. We were at 6 before +// (= ~20-30 rows per notch) which visually teleported and forced the +// virtualization to reshape the mount range on every event. +// +// If this feels sluggish on precision scrolls, porting claude-code's +// wheel accel state machine (ScrollKeybindingHandler.tsx) is the right +// next step — it ramps step up during sustained fast clicks and decays +// on pause. +export const WHEEL_SCROLL_STEP = 1 From d3dedf10aaefb14fc2f3f03c109bf4f87c43a1cf Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 17:03:38 -0500 Subject: [PATCH 62/87] revert(tui): drop DeferredMd, profiling showed it was neutral MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Profiled with scripts/profile-tui.py under hold-PageUp + hold-wheel. The placeholder → microtask-upgrade pattern did not reduce renderer p99 (63ms → 63ms) or max (96ms → 142ms, slightly worse). 
Each fresh row still pays the Md cost — just on a follow-up commit instead of inline — and the follow-up commit shows up as a second heavy frame a few ms later. The real bottlenecks turned out to be: 1. wheel step too large (fixed in 7ca16eea) 2. outer terminal ANSI parse throughput (diagnosing next) 3. React commit frequency during hold-scroll (needs coalescing) None of which DeferredMd addresses. Clearing the complexity so the next experiments land on a simpler substrate. --- ui-tui/src/components/deferredMarkdown.tsx | 90 ---------------------- ui-tui/src/components/messageLine.tsx | 9 +-- ui-tui/src/components/thinking.tsx | 2 +- 3 files changed, 3 insertions(+), 98 deletions(-) delete mode 100644 ui-tui/src/components/deferredMarkdown.tsx diff --git a/ui-tui/src/components/deferredMarkdown.tsx b/ui-tui/src/components/deferredMarkdown.tsx deleted file mode 100644 index 55d984a3..00000000 --- a/ui-tui/src/components/deferredMarkdown.tsx +++ /dev/null @@ -1,90 +0,0 @@ -// DeferredMd — renders a lightweight placeholder on first mount and -// upgrades to full markdown + syntax highlighting in a subsequent -// transition commit. Spreads the parse cost off the scroll critical path. -// -// Why: profiling shows the 63-112ms renderer spikes during hold-PageUp -// correlate with fresh MessageLine mounts running the markdown tokenizer -// + syntax highlighting synchronously. The new row is added by -// useVirtualHistory's slide step; React commits the tree; Ink lays out -// Yoga; stdout writes the result. All in one hitch frame. -// -// With this wrapper, the hitch frame lays out a pre-wrapped plain -// (Yoga only needs to wrap width-known strings — no tokenizer, no -// highlighter, no inline regex walk), then a follow-up commit re-renders -// the same row with full markdown. The follow-up is gated on a -// queueMicrotask so Ink has a chance to paint the placeholder before -// React starts the Md-heavy upgrade work. 
-// -// Upgrade cache: once a given (theme, text, compact) tuple has been -// rendered as full Md, we remember it so remounts (scroll-out then -// scroll-back) don't pay the placeholder round-trip again — they mount -// straight into the upgraded subtree, which Md internally memoizes -// on text identity, so there's no re-tokenization either. - -import { Text } from '@hermes/ink' -import { memo, useEffect, useState } from 'react' - -import type { Theme } from '../theme.js' - -import { Md, stripInlineMarkup } from './markdown.js' - -// Theme object is stable per-session; key upgrades under it so palette -// swaps naturally retrigger (colors differ → render changes). -const upgraded = new WeakMap>() - -const cacheKey = (compact: boolean | undefined, text: string) => (compact ? `c:${text}` : `x:${text}`) - -const hasUpgraded = (t: Theme, key: string) => upgraded.get(t)?.has(key) ?? false - -const markUpgraded = (t: Theme, key: string) => { - const bucket = upgraded.get(t) ?? new Set() - - bucket.add(key) - upgraded.set(t, bucket) -} - -export const DeferredMd = memo(function DeferredMd({ color, compact, t, text }: DeferredMdProps) { - const key = cacheKey(compact, text) - const [ready, setReady] = useState(() => hasUpgraded(t, key) || !text) - - useEffect(() => { - if (ready) { - return - } - - let cancelled = false - - queueMicrotask(() => { - if (cancelled) { - return - } - - markUpgraded(t, key) - setReady(true) - }) - - return () => { - cancelled = true - } - }, [key, ready, t]) - - if (ready) { - return - } - - // Placeholder: strip inline markup so the visible width approximately - // matches the final Md layout (bold/italic/links are width-neutral or - // collapse to anchor text). Line breaks preserved — Ink's wrap="wrap" - // lays the plain text out as blocks at the right column count. - // Using directly (no Box wrapper) so there's no column-flex - // decision for Yoga — it just wraps a string. 
- return {stripInlineMarkup(text)} -}) - -interface DeferredMdProps { - /** Fallback color for the placeholder text (typically the role's body color). */ - color?: string - compact?: boolean - t: Theme - text: string -} diff --git a/ui-tui/src/components/messageLine.tsx b/ui-tui/src/components/messageLine.tsx index fe7c8076..a3d3f584 100644 --- a/ui-tui/src/components/messageLine.tsx +++ b/ui-tui/src/components/messageLine.tsx @@ -9,7 +9,7 @@ import { boundedLiveRenderText, compactPreview, hasAnsi, isPasteBackedText, stri import type { Theme } from '../theme.js' import type { ActiveTool, DetailsMode, Msg, SectionVisibility } from '../types.js' -import { DeferredMd } from './deferredMarkdown.js' +import { Md } from './markdown.js' import { StreamingMd } from './streamingMarkdown.js' import { ToolTrail } from './thinking.js' import { TodoPanel } from './todoPanel.js' @@ -107,12 +107,7 @@ export const MessageLine = memo(function MessageLine({ // streamingMarkdown.tsx for the cost model. ) : ( - // Deferred markdown: plain-text placeholder on first mount, upgrade - // to full Md on a queued microtask. Spreads the tokenizer + syntax - // cost off the scroll critical path so hold-PageUp doesn't hitch - // on fresh assistant rows entering overscan. See - // deferredMarkdown.tsx for the trade-offs. - + ) } diff --git a/ui-tui/src/components/thinking.tsx b/ui-tui/src/components/thinking.tsx index 0fd47315..03ecf8c8 100644 --- a/ui-tui/src/components/thinking.tsx +++ b/ui-tui/src/components/thinking.tsx @@ -899,7 +899,7 @@ export const ToolTrail = memo(function ToolTrail({ return duration ? 
( <> {label} - + {duration} From f823535db21585b0b604f9f48c41baaadcdaa12a Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 17:06:22 -0500 Subject: [PATCH 63/87] =?UTF-8?q?perf(tui):=20instrument=20stdout=20drain?= =?UTF-8?q?=20=E2=80=94=20rule=20out=20terminal=20parse=20bottleneck?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds four fields to FrameEvent.phases and the matching profile summary: optimizedPatches post-optimize patch count (what's actually written to stdout; the .patches field is pre-optimize) writeBytes UTF-8 byte count of the write this frame backpressure true when Node's stdout.write returned false (Writable buffer full — outer terminal can't keep up) prevFrameDrainMs end-to-end drain time of the PREVIOUS frame's write, captured from stdout.write's 2-arg callback. Reported on the next frame so the measurement reflects "time until OS flushed the bytes to the terminal fd", not "time until queued in Node". writeDiffToTerminal() now returns { bytes, backpressure } and accepts an optional onDrain callback. Only attached on TTY with diff; piped/non-TTY stdout bypasses flow control so the callback would fire synchronously anyway. Initial measurements under hold-wheel_up against 1106-msg session (30Hz for 6s): patches total 28,888 optimized total 16,700 (ratio 0.58 — optimizer cuts ~42%) writeBytes 42 KB / 10s = 4.2 KB/s throughput drainMs p50 0.14 ms terminal accepts bytes instantly drainMs p99 0.85 ms backpressure 0% of frames This rules out the terminal-parse hypothesis — Cursor's xterm.js drains our output in sub-millisecond time at only 4 KB/s. The remaining lag has to be in the render pipeline, not the wire. Profile output now includes the bytes+drain+backpressure lines to keep this visible on every subsequent iteration. 
--- scripts/profile-tui.py | 39 ++++++++++++++ ui-tui/packages/hermes-ink/src/ink/frame.ts | 11 ++++ ui-tui/packages/hermes-ink/src/ink/ink.tsx | 51 ++++++++++++++++++- .../packages/hermes-ink/src/ink/terminal.ts | 21 ++++++-- ui-tui/src/lib/perfPane.tsx | 4 ++ ui-tui/src/types/hermes-ink.d.ts | 4 ++ 6 files changed, 126 insertions(+), 4 deletions(-) diff --git a/scripts/profile-tui.py b/scripts/profile-tui.py index 5ef987c6..e70e9906 100755 --- a/scripts/profile-tui.py +++ b/scripts/profile-tui.py @@ -219,6 +219,45 @@ def format_report(data: dict[str, Any]) -> str: f" patches p50={pct(patches,0.5):.0f} p99={pct(patches,0.99):.0f} " f"max={max(patches)} total={sum(patches)}" ) + optimized = [ + f["phases"].get("optimizedPatches", 0) + for f in frames if f.get("phases") + ] + if any(optimized): + out.append( + f" optimized p50={pct(optimized,0.5):.0f} p99={pct(optimized,0.99):.0f} " + f"max={max(optimized)} total={sum(optimized)}" + f" (ratio: {sum(optimized)/max(1,sum(patches)):.2f})" + ) + + # Write bytes + drain telemetry — the outer-terminal bottleneck gauge. 
+ bytes_written = [ + f["phases"].get("writeBytes", 0) + for f in frames if f.get("phases") + ] + if any(bytes_written): + total_b = sum(bytes_written) + kb = total_b / 1024 + out.append( + f" writeBytes p50={pct(bytes_written,0.5):.0f}B p99={pct(bytes_written,0.99):.0f}B " + f"max={max(bytes_written)}B total={kb:.1f}KB" + ) + drains = [ + f["phases"].get("prevFrameDrainMs", 0) + for f in frames if f.get("phases") + ] + if any(d > 0 for d in drains): + nonzero = [d for d in drains if d > 0] + out.append( + f" drainMs p50={pct(nonzero,0.5):.2f} p95={pct(nonzero,0.95):.2f} " + f"p99={pct(nonzero,0.99):.2f} max={max(nonzero):.2f} (terminal flush latency)" + ) + backpressure = sum(1 for f in frames if f.get("phases", {}).get("backpressure")) + if backpressure: + out.append( + f" backpressure: {backpressure}/{len(frames)} frames " + f"({100*backpressure/len(frames):.0f}%) (Node stdout buffer full — terminal slow)" + ) # Flickers flicker_frames = [f for f in frames if f.get("flickers")] diff --git a/ui-tui/packages/hermes-ink/src/ink/frame.ts b/ui-tui/packages/hermes-ink/src/ink/frame.ts index b85c0ad9..760fcc52 100644 --- a/ui-tui/packages/hermes-ink/src/ink/frame.ts +++ b/ui-tui/packages/hermes-ink/src/ink/frame.ts @@ -46,6 +46,17 @@ export type FrameEvent = { write: number /** Pre-optimize patch count (proxy for how much changed this frame) */ patches: number + /** Post-optimize patch count — what was actually written to stdout. */ + optimizedPatches: number + /** Bytes written to stdout this frame (escape sequences + payload). */ + writeBytes: number + /** Whether stdout.write returned false (backpressure = outer terminal slow). */ + backpressure: boolean + /** ms from this frame's stdout.write until the write-callback fired. + * Populated on the NEXT frame (async), so this field reflects the + * PREVIOUS frame's terminal-drain time. 0 = callback already fired + * before next frame started (drained in sub-ms). 
*/ + prevFrameDrainMs: number /** yoga calculateLayout() time (runs in resetAfterCommit, before onRender) */ yoga: number /** React reconcile time: scrollMutated → resetAfterCommit. 0 if no commit. */ diff --git a/ui-tui/packages/hermes-ink/src/ink/ink.tsx b/ui-tui/packages/hermes-ink/src/ink/ink.tsx index 71e3066a..1bd47d61 100644 --- a/ui-tui/packages/hermes-ink/src/ink/ink.tsx +++ b/ui-tui/packages/hermes-ink/src/ink/ink.tsx @@ -165,6 +165,15 @@ export default class Ink { private backFrame: Frame private lastPoolResetTime = performance.now() private drainTimer: ReturnType | null = null + // Write-drain telemetry: pendingWriteStart is the performance.now() of + // the most recent stdout.write waiting for its drain callback. Set to + // null when the callback fires (drained). Read on the NEXT frame and + // reported as prevFrameDrainMs so the FrameEvent records how long the + // previous write took to actually hit the terminal — distinguishes + // "queued in Node" (write returned true) from "terminal accepted bytes" + // (callback fired). + private pendingWriteStart: number | null = null + private lastDrainMs = 0 private lastYogaCounters: { ms: number visited: number @@ -970,7 +979,43 @@ export default class Ink { } const tWrite = performance.now() - writeDiffToTerminal(this.terminal, optimized, this.altScreenActive && !SYNC_OUTPUT_SUPPORTED) + // Capture any stale pending write BEFORE starting this frame's write — + // if the callback already fired, pendingWriteStart is null and lastDrainMs + // already reflects the previous frame's drain. If it hasn't fired, we + // report "still pending" via a non-zero duration based on now-then so + // backpressure shows up even if Node never flushes this session. + const staleDrain = + this.pendingWriteStart !== null + ? performance.now() - this.pendingWriteStart + : this.lastDrainMs + + const prevFrameDrainMs = Math.round(staleDrain * 100) / 100 + this.lastDrainMs = 0 + + // Only track drain on TTY. 
Piped/non-TTY stdout bypasses flow control. + const trackDrain = this.options.stdout.isTTY && hasDiff + const drainStart = trackDrain ? tWrite : 0 + + if (trackDrain) { + this.pendingWriteStart = drainStart + } + + const { bytes: writeBytes, backpressure } = writeDiffToTerminal( + this.terminal, + optimized, + this.altScreenActive && !SYNC_OUTPUT_SUPPORTED, + trackDrain + ? () => { + // Callback fires once Node has flushed the chunk to the OS. + // Capture the drain time and clear pending so the NEXT frame's + // staleDrain = the real end-to-end flush time. + if (this.pendingWriteStart === drainStart) { + this.lastDrainMs = performance.now() - drainStart + this.pendingWriteStart = null + } + } + : undefined + ) const writeMs = performance.now() - tWrite // Update blit safety for the NEXT frame. The frame just rendered @@ -1008,6 +1053,10 @@ export default class Ink { optimize: optimizeMs, write: writeMs, patches: diff.length, + optimizedPatches: optimized.length, + writeBytes, + backpressure, + prevFrameDrainMs, yoga: yogaMs, commit: commitMs, yogaVisited: yc.visited, diff --git a/ui-tui/packages/hermes-ink/src/ink/terminal.ts b/ui-tui/packages/hermes-ink/src/ink/terminal.ts index 75637c76..0ffe6e80 100644 --- a/ui-tui/packages/hermes-ink/src/ink/terminal.ts +++ b/ui-tui/packages/hermes-ink/src/ink/terminal.ts @@ -203,10 +203,15 @@ export type Terminal = { stderr: Writable } -export function writeDiffToTerminal(terminal: Terminal, diff: Diff, skipSyncMarkers = false): void { +export function writeDiffToTerminal( + terminal: Terminal, + diff: Diff, + skipSyncMarkers = false, + onDrain?: () => void +): { bytes: number; backpressure: boolean } { // No output if there are no patches if (diff.length === 0) { - return + return { bytes: 0, backpressure: false } } // BSU/ESU wrapping is opt-out to keep main-screen behavior unchanged. 
@@ -278,5 +283,15 @@ export function writeDiffToTerminal(terminal: Terminal, diff: Diff, skipSyncMark buffer += ESU } - terminal.stdout.write(buffer) + // Node's Writable.write returns false when the internal buffer is full + // (backpressure). On a slow terminal parser that's the tell: we're + // producing bytes faster than the outer terminal can consume them. + // The 2-arg form attaches a drain callback that fires once the chunk + // is actually flushed to the OS socket/pipe — giving us end-to-end + // drain timing, not just "queued in Node". + const wrote = onDrain + ? terminal.stdout.write(buffer, () => onDrain()) + : terminal.stdout.write(buffer) + + return { bytes: Buffer.byteLength(buffer, 'utf8'), backpressure: !wrote } } diff --git a/ui-tui/src/lib/perfPane.tsx b/ui-tui/src/lib/perfPane.tsx index 331fb62d..ab512c10 100644 --- a/ui-tui/src/lib/perfPane.tsx +++ b/ui-tui/src/lib/perfPane.tsx @@ -149,12 +149,16 @@ export const logFrameEvent = ENABLED flickers: event.flickers.length ? event.flickers : undefined, phases: event.phases ? 
{ + backpressure: event.phases.backpressure, commit: round2(event.phases.commit), diff: round2(event.phases.diff), optimize: round2(event.phases.optimize), + optimizedPatches: event.phases.optimizedPatches, patches: event.phases.patches, + prevFrameDrainMs: round2(event.phases.prevFrameDrainMs), renderer: round2(event.phases.renderer), write: round2(event.phases.write), + writeBytes: event.phases.writeBytes, yoga: round2(event.phases.yoga), yogaCacheHits: event.phases.yogaCacheHits, yogaLive: event.phases.yogaLive, diff --git a/ui-tui/src/types/hermes-ink.d.ts b/ui-tui/src/types/hermes-ink.d.ts index 0ad9a957..4ecd10ee 100644 --- a/ui-tui/src/types/hermes-ink.d.ts +++ b/ui-tui/src/types/hermes-ink.d.ts @@ -41,6 +41,10 @@ declare module '@hermes/ink' { readonly optimize: number readonly write: number readonly patches: number + readonly optimizedPatches: number + readonly writeBytes: number + readonly backpressure: boolean + readonly prevFrameDrainMs: number readonly yoga: number readonly commit: number readonly yogaVisited: number From 82f842277e8b6b9a87d3fc9572f9054fb31d8339 Mon Sep 17 00:00:00 2001 From: Brooklyn Nicholson Date: Sun, 26 Apr 2026 17:08:07 -0500 Subject: [PATCH 64/87] perf(tui): profile harness gains --loop, --save, --compare MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before: change code → build → run profile → manually compare to mental model of last run. After: `--loop` watches ui-tui/src and packages/hermes-ink/src for .ts(x) changes, rebuilds on change, re-runs the same scenario, prints a side-by-side A/B diff against the previous iteration — so each edit's impact is quantified instantly. Ctrl+C to stop. Also added: --save LABEL saves metrics snapshot to /tmp/perf-