Skip to content
5 changes: 5 additions & 0 deletions .changeset/perf-timeline-scroll-cache.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
default: patch
---

Cache VList item heights across room visits to restore scroll position instantly and skip the 80 ms opacity-fade stabilisation timer on revisit.
136 changes: 116 additions & 20 deletions src/app/features/room/RoomTimeline.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import { useAtomValue, useSetAtom } from 'jotai';
import { PushProcessor, Room, Direction } from '$types/matrix-sdk';
import classNames from 'classnames';
import { VList, VListHandle } from 'virtua';
import { roomScrollCache, RoomScrollCache } from '$utils/roomScrollCache';
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

RoomScrollCache is used only as a type in this file. Import it with type (or split the import) to avoid pulling it into the runtime module graph unnecessarily, consistent with other files in this repo that use import type for types.

Suggested change
import { roomScrollCache, RoomScrollCache } from '$utils/roomScrollCache';
import { roomScrollCache } from '$utils/roomScrollCache';
import type { RoomScrollCache } from '$utils/roomScrollCache';

Copilot uses AI. Check for mistakes.
import {
as,
Box,
Expand Down Expand Up @@ -199,6 +200,13 @@ export function RoomTimeline({
const setOpenThread = useSetAtom(openThreadAtom);

const vListRef = useRef<VListHandle>(null);
// Load any cached scroll state for this room on mount. A fresh RoomTimeline is
// mounted per room (via key={roomId} in RoomView) so this is the only place we
// need to read the cache — the render-phase room-change block below only fires
// in the (hypothetical) case where the room prop changes without a remount.
const scrollCacheForRoomRef = useRef<RoomScrollCache | undefined>(
roomScrollCache.load(room.roomId)
);
const [atBottomState, setAtBottomState] = useState(true);
const atBottomRef = useRef(atBottomState);
const setAtBottom = useCallback((val: boolean) => {
Expand All @@ -220,15 +228,26 @@ export function RoomTimeline({
// A recovery useLayoutEffect watches for processedEvents becoming non-empty
// and performs the final scroll + setIsReady when this flag is set.
const pendingReadyRef = useRef(false);
// Set to true before each programmatic scroll-to-bottom so intermediate
// onScroll events from virtua's height-correction pass cannot drive
// atBottomState to false (flashing the "Jump to Latest" button).
// Cleared when VList confirms isNowAtBottom, or on the first intermediate
// event so subsequent user-initiated scrolls are tracked normally.
const programmaticScrollToBottomRef = useRef(false);
const currentRoomIdRef = useRef(room.roomId);

const [isReady, setIsReady] = useState(false);

if (currentRoomIdRef.current !== room.roomId) {
// Load incoming room's scroll cache (undefined for first-visit rooms).
// Covers the rare case where room prop changes without a remount.
scrollCacheForRoomRef.current = roomScrollCache.load(room.roomId);

hasInitialScrolledRef.current = false;
mountScrollWindowRef.current = Date.now() + 3000;
currentRoomIdRef.current = room.roomId;
pendingReadyRef.current = false;
programmaticScrollToBottomRef.current = false;
if (initialScrollTimerRef.current !== undefined) {
clearTimeout(initialScrollTimerRef.current);
initialScrollTimerRef.current = undefined;
Expand Down Expand Up @@ -296,28 +315,69 @@ export function RoomTimeline({
timelineSync.liveTimelineLinked &&
vListRef.current
) {
vListRef.current.scrollToIndex(processedEventsRef.current.length - 1, { align: 'end' });
// Store in a ref rather than a local so subsequent eventsLength changes
// (e.g. the onLifecycle timeline reset firing within 80 ms) do NOT
// cancel this timer through the useLayoutEffect cleanup.
initialScrollTimerRef.current = setTimeout(() => {
initialScrollTimerRef.current = undefined;
if (processedEventsRef.current.length > 0) {
vListRef.current?.scrollToIndex(processedEventsRef.current.length - 1, { align: 'end' });
// Only mark ready once we've successfully scrolled. If processedEvents
// was empty when the timer fired (e.g. the onLifecycle reset cleared the
// timeline within the 80 ms window), defer setIsReady until the recovery
// effect below fires once events repopulate.
setIsReady(true);
const savedCache = scrollCacheForRoomRef.current;
hasInitialScrolledRef.current = true;

if (savedCache) {
// Revisiting a room with a cached scroll state — restore position
// immediately and skip the 80 ms stabilisation timer entirely.
if (savedCache.atBottom) {
programmaticScrollToBottomRef.current = true;
vListRef.current.scrollToIndex(processedEventsRef.current.length - 1, { align: 'end' });
// scrollToIndex is async; pre-empt the button so it doesn't flash for
Comment on lines +321 to +327
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In the saved-cache restore path, scrollToIndex(processedEventsRef.current.length - 1, ...) can be called when processedEventsRef.current is empty (the same TimelineReset race described for the 80ms timer path), which would pass -1 to scrollToIndex. Add a guard for processedEventsRef.current.length > 0 (or reuse pendingReadyRef to defer restore until events repopulate) before attempting the scroll.

Copilot uses AI. Check for mistakes.
// one render cycle before VList's onScroll confirms the position.
setAtBottom(true);
} else {
pendingReadyRef.current = true;
vListRef.current.scrollTo(savedCache.scrollOffset);
}
}, 80);
hasInitialScrolledRef.current = true;
setIsReady(true);
} else {
Comment on lines +329 to +334
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When restoring a non-bottom scroll position (savedCache.atBottom === false), the component leaves atBottomState at its default true until onScroll fires. Because setIsReady(true) happens immediately, this can briefly hide the “Jump to Latest” UI and can trigger at-bottom behaviors (e.g. auto-mark-as-read) even though the restored position is not at the bottom. Consider setting setAtBottom(false) (or setAtBottom(savedCache.atBottom)) as part of the restore before marking the timeline ready.

Copilot uses AI. Check for mistakes.
// First visit — original behaviour: scroll to bottom, then wait 80 ms
// for VList to finish measuring item heights before revealing the timeline.
vListRef.current.scrollToIndex(processedEventsRef.current.length - 1, { align: 'end' });
// Store in a ref rather than a local so subsequent eventsLength changes
// (e.g. the onLifecycle timeline reset firing within 80 ms) do NOT
// cancel this timer through the useLayoutEffect cleanup.
initialScrollTimerRef.current = setTimeout(() => {
initialScrollTimerRef.current = undefined;
if (processedEventsRef.current.length > 0) {
programmaticScrollToBottomRef.current = true;
vListRef.current?.scrollToIndex(processedEventsRef.current.length - 1, {
align: 'end',
});
// Persist the now-measured item heights so the next visit to this room
// can provide them to VList upfront and skip this 80 ms wait entirely.
const v = vListRef.current;
if (v) {
roomScrollCache.save(room.roomId, {
cache: v.cache,
scrollOffset: v.scrollOffset,
atBottom: true,
});
}
// Only mark ready once we've successfully scrolled. If processedEvents
// was empty when the timer fired (e.g. the onLifecycle reset cleared the
// timeline within the 80 ms window), defer setIsReady until the recovery
// effect below fires once events repopulate.
// scrollToIndex is async; pre-empt atBottom so the "Jump to Latest"
// button doesn't flash for one render cycle before onScroll confirms.
setAtBottom(true);
setIsReady(true);
} else {
pendingReadyRef.current = true;
}
}, 80);
}
}
// No cleanup return — the timer must survive eventsLength fluctuations.
// It is cancelled on unmount by the dedicated effect below.
}, [timelineSync.eventsLength, timelineSync.liveTimelineLinked, eventId, room.roomId]);
}, [
timelineSync.eventsLength,
timelineSync.liveTimelineLinked,
eventId,
room.roomId,
setAtBottom,
]);

// Cancel the initial-scroll timer on unmount (the useLayoutEffect above
// intentionally does not cancel it when deps change).
Expand Down Expand Up @@ -620,10 +680,30 @@ export function RoomTimeline({

const distanceFromBottom = v.scrollSize - offset - v.viewportSize;
const isNowAtBottom = distanceFromBottom < 100;
// Clear the programmatic-scroll guard whenever VList confirms we are at the
// bottom, regardless of whether atBottomRef needs updating.
if (isNowAtBottom) programmaticScrollToBottomRef.current = false;
if (isNowAtBottom !== atBottomRef.current) {
setAtBottom(isNowAtBottom);
if (isNowAtBottom || !programmaticScrollToBottomRef.current) {
setAtBottom(isNowAtBottom);
} else {
// VList fired an intermediate "not at bottom" event while settling after
// a programmatic scroll-to-bottom (e.g. height-correction pass). Suppress
// the false negative and clear the guard so the next event — either a
// VList correction to the true bottom, or a genuine user scroll — is
// processed normally.
programmaticScrollToBottomRef.current = false;
}
}

// Keep the scroll cache fresh so the next visit to this room can restore
// position (and skip the 80 ms measurement wait) immediately on mount.
roomScrollCache.save(room.roomId, {
cache: v.cache,
scrollOffset: offset,
atBottom: isNowAtBottom,
});
Comment on lines +699 to +705
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Saving the scroll cache on every onScroll callback allocates a new object and updates the Map at scroll-event frequency, which can be very hot during user scrolling. Consider throttling/debouncing these saves (e.g. per animation frame) and/or only saving when values materially change, to reduce GC pressure and keep scrolling smooth.

Copilot uses AI. Check for mistakes.

if (offset < 500 && canPaginateBackRef.current && backwardStatusRef.current === 'idle') {
timelineSyncRef.current.handleTimelinePagination(true);
}
Expand All @@ -635,7 +715,7 @@ export function RoomTimeline({
timelineSyncRef.current.handleTimelinePagination(false);
}
},
[setAtBottom]
[setAtBottom, room.roomId]
);

const showLoadingPlaceholders =
Expand Down Expand Up @@ -741,9 +821,23 @@ export function RoomTimeline({
if (!pendingReadyRef.current) return;
if (processedEvents.length === 0) return;
pendingReadyRef.current = false;
programmaticScrollToBottomRef.current = true;
vListRef.current?.scrollToIndex(processedEvents.length - 1, { align: 'end' });
// The 80 ms timer's cache-save was skipped because processedEvents was empty
// when it fired. Save now so the next visit skips the timer.
const v = vListRef.current;
if (v) {
roomScrollCache.save(room.roomId, {
cache: v.cache,
scrollOffset: v.scrollOffset,
atBottom: true,
});
}
// scrollToIndex is async; pre-empt atBottom so the "Jump to Latest" button
// doesn't flash for one render cycle before onScroll confirms the position.
setAtBottom(true);
setIsReady(true);
}, [processedEvents.length]);
}, [processedEvents.length, room.roomId, setAtBottom]);

useEffect(() => {
if (!onEditLastMessageRef) return;
Expand Down Expand Up @@ -844,8 +938,10 @@ export function RoomTimeline({
}}
>
<VList<ProcessedEvent>
key={room.roomId}
ref={vListRef}
data={processedEvents}
cache={scrollCacheForRoomRef.current?.cache}
shift={shift}
className={css.messageList}
style={{
Expand Down
34 changes: 34 additions & 0 deletions src/app/utils/roomScrollCache.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import { describe, it, expect } from 'vitest';
import { roomScrollCache } from './roomScrollCache';

type Snapshot = import('./roomScrollCache').RoomScrollCache['cache'];

// CacheSnapshot is opaque in tests — cast a plain object.
function makeSnapshot(): Snapshot {
  return {} as Snapshot;
}

describe('roomScrollCache', () => {
  it('load returns undefined for an unknown roomId', () => {
    const result = roomScrollCache.load('!unknown:test');
    expect(result).toBeUndefined();
  });

  it('stores and retrieves data for a roomId', () => {
    const entry = { cache: makeSnapshot(), scrollOffset: 120, atBottom: false };
    roomScrollCache.save('!room1:test', entry);
    expect(roomScrollCache.load('!room1:test')).toBe(entry);
  });

  it('overwrites existing data when saved again for the same roomId', () => {
    const initial = { cache: makeSnapshot(), scrollOffset: 50, atBottom: true };
    const replacement = { cache: makeSnapshot(), scrollOffset: 200, atBottom: false };
    roomScrollCache.save('!room2:test', initial);
    roomScrollCache.save('!room2:test', replacement);
    expect(roomScrollCache.load('!room2:test')).toBe(replacement);
  });

  it('keeps data for separate rooms independent', () => {
    const entryA = { cache: makeSnapshot(), scrollOffset: 10, atBottom: true };
    const entryB = { cache: makeSnapshot(), scrollOffset: 20, atBottom: false };
    roomScrollCache.save('!roomA:test', entryA);
    roomScrollCache.save('!roomB:test', entryB);
    expect(roomScrollCache.load('!roomA:test')).toBe(entryA);
    expect(roomScrollCache.load('!roomB:test')).toBe(entryB);
  });
});
22 changes: 22 additions & 0 deletions src/app/utils/roomScrollCache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import { CacheSnapshot } from 'virtua';
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

CacheSnapshot is only used as a TypeScript type here. Importing it as a value (import { CacheSnapshot } ...) can force an unnecessary runtime dependency on virtua in the emitted JS/bundler graph. Switch this to a type-only import (e.g. import type { CacheSnapshot } ...).

Suggested change
import { CacheSnapshot } from 'virtua';
import type { CacheSnapshot } from 'virtua';

Copilot uses AI. Check for mistakes.

export type RoomScrollCache = {
/** VList item-size snapshot — restored via VList `cache=` prop on remount. */
cache: CacheSnapshot;
/** Pixel scroll offset at the time the room was left. */
scrollOffset: number;
/** Whether the view was pinned to the bottom (live) when the room was left. */
atBottom: boolean;
};

/** Session-scoped, per-room scroll cache. Not persisted across page reloads. */
const scrollCacheMap = new Map<string, RoomScrollCache>();

export const roomScrollCache = {
save(roomId: string, data: RoomScrollCache): void {
scrollCacheMap.set(roomId, data);
},
load(roomId: string): RoomScrollCache | undefined {
return scrollCacheMap.get(roomId);
},
Comment on lines +13 to +21
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

scrollCacheMap grows for every room visited and is never cleared/evicted. Since cache can be sizable (item-size snapshots), this can lead to unbounded memory growth over a long session with many rooms. Consider adding an eviction strategy (e.g., LRU with a max entries cap) and/or a way to clear caches on logout/session reset.

Suggested change
const scrollCacheMap = new Map<string, RoomScrollCache>();
export const roomScrollCache = {
save(roomId: string, data: RoomScrollCache): void {
scrollCacheMap.set(roomId, data);
},
load(roomId: string): RoomScrollCache | undefined {
return scrollCacheMap.get(roomId);
},
const MAX_SCROLL_CACHE_ENTRIES = 100;
const scrollCacheMap = new Map<string, RoomScrollCache>();
function evictOldestScrollCacheEntry(): void {
if (scrollCacheMap.size <= MAX_SCROLL_CACHE_ENTRIES) return;
const oldestRoomId = scrollCacheMap.keys().next().value;
if (oldestRoomId !== undefined) {
scrollCacheMap.delete(oldestRoomId);
}
}
export const roomScrollCache = {
save(roomId: string, data: RoomScrollCache): void {
if (scrollCacheMap.has(roomId)) {
scrollCacheMap.delete(roomId);
}
scrollCacheMap.set(roomId, data);
evictOldestScrollCacheEntry();
},
load(roomId: string): RoomScrollCache | undefined {
const cached = scrollCacheMap.get(roomId);
if (cached === undefined) return undefined;
scrollCacheMap.delete(roomId);
scrollCacheMap.set(roomId, cached);
return cached;
},
clear(): void {
scrollCacheMap.clear();
},

Copilot uses AI. Check for mistakes.
};
Loading