@@ -195,6 +195,12 @@ function pushMessage(currentMessages: Message[], incomingMsg: Message): Message[
195195 }
196196}
197197
198+ function prefersReducedMotion ( ) : boolean {
199+ return window . matchMedia ( '(prefers-reduced-motion: reduce)' ) . matches ;
200+ }
201+
// Minimum time (in milliseconds) between batched UI dispatches while the
// user prefers reduced motion; throttles token-by-token re-renders to at
// most one update per interval.
const REDUCED_MOTION_BATCH_INTERVAL = 1000;
203+
198204async function streamFromResponse (
199205 stream : AsyncIterable < MessageEvent > ,
200206 initialMessages : Message [ ] ,
@@ -203,6 +209,49 @@ async function streamFromResponse(
203209 sessionId : string
204210) : Promise < void > {
205211 let currentMessages = initialMessages ;
212+ const reduceMotion = prefersReducedMotion ( ) ;
213+ let latestTokenState : TokenState | null = null ;
214+ let latestChatState : ChatState = ChatState . Streaming ;
215+ let lastBatchUpdate = Date . now ( ) ;
216+ let hasPendingUpdate = false ;
217+
  // Flush the most recently recorded token/message/chat-state snapshot to the
  // store. Only acts in reduced-motion mode and only when an update is
  // pending; resets the batching clock afterwards.
  //
  // NOTE(review): `latestChatState` is initialized to ChatState.Streaming —
  // a flush that fires before any maybeUpdateUI() call would dispatch that
  // default; confirm that ordering cannot occur.
  // NOTE(review): `latestTokenState` starts null (e.g. when only
  // UpdateConversation events set hasPendingUpdate), so SET_TOKEN_STATE is
  // deliberately skipped in that case.
  const flushBatchedUpdates = () => {
    if (reduceMotion && hasPendingUpdate) {
      if (latestTokenState) {
        dispatch({ type: 'SET_TOKEN_STATE', payload: latestTokenState });
      }
      dispatch({ type: 'SET_MESSAGES', payload: currentMessages });
      dispatch({ type: 'SET_CHAT_STATE', payload: latestChatState });
      hasPendingUpdate = false;
      lastBatchUpdate = Date.now();
    }
  };
229+
  // Dispatch (or batch) a token/message/chat-state update.
  //
  // - Motion allowed: dispatch all three actions immediately on every call.
  // - Reduced motion + forceImmediate: dispatch immediately (used for
  //   states the user must see right away, e.g. WaitingForUserInput) and
  //   clear any pending batch.
  // - Reduced motion otherwise: record the latest state and only flush once
  //   REDUCED_MOTION_BATCH_INTERVAL has elapsed since the last flush.
  //
  // NOTE(review): the immediate-dispatch triple is duplicated across the
  // first two branches — consider extracting a local helper.
  // NOTE(review): batching is polled only when a new stream event arrives;
  // if the stream stalls, a pending update stays undispatched until the next
  // event or the final flushBatchedUpdates() — confirm that is acceptable.
  const maybeUpdateUI = (
    tokenState: TokenState,
    chatState: ChatState,
    forceImmediate = false
  ) => {
    if (!reduceMotion) {
      dispatch({ type: 'SET_TOKEN_STATE', payload: tokenState });
      dispatch({ type: 'SET_MESSAGES', payload: currentMessages });
      dispatch({ type: 'SET_CHAT_STATE', payload: chatState });
    } else if (forceImmediate) {
      dispatch({ type: 'SET_TOKEN_STATE', payload: tokenState });
      dispatch({ type: 'SET_MESSAGES', payload: currentMessages });
      dispatch({ type: 'SET_CHAT_STATE', payload: chatState });
      hasPendingUpdate = false;
      lastBatchUpdate = Date.now();
    } else {
      latestTokenState = tokenState;
      latestChatState = chatState;
      hasPendingUpdate = true;
      const now = Date.now();
      if (now - lastBatchUpdate >= REDUCED_MOTION_BATCH_INTERVAL) {
        flushBatchedUpdates();
      }
    }
  };
206255
207256 try {
208257 for await ( const event of stream ) {
@@ -221,24 +270,23 @@ async function streamFromResponse(
221270 ) ;
222271
223272 if ( hasToolConfirmation || hasElicitation ) {
224- dispatch ( { type : 'SET_CHAT_STATE' , payload : ChatState . WaitingForUserInput } ) ;
273+ maybeUpdateUI ( event . token_state , ChatState . WaitingForUserInput , true ) ;
225274 } else if ( getCompactingMessage ( msg ) ) {
226- dispatch ( { type : 'SET_CHAT_STATE' , payload : ChatState . Compacting } ) ;
275+ maybeUpdateUI ( event . token_state , ChatState . Compacting ) ;
227276 } else if ( getThinkingMessage ( msg ) ) {
228- dispatch ( { type : 'SET_CHAT_STATE' , payload : ChatState . Thinking } ) ;
277+ maybeUpdateUI ( event . token_state , ChatState . Thinking ) ;
229278 } else {
230- dispatch ( { type : 'SET_CHAT_STATE' , payload : ChatState . Streaming } ) ;
279+ maybeUpdateUI ( event . token_state , ChatState . Streaming ) ;
231280 }
232-
233- dispatch ( { type : 'SET_TOKEN_STATE' , payload : event . token_state } ) ;
234- dispatch ( { type : 'SET_MESSAGES' , payload : currentMessages } ) ;
235281 break ;
236282 }
237283 case 'Error' : {
284+ flushBatchedUpdates ( ) ;
238285 onFinish ( 'Stream error: ' + event . error ) ;
239286 return ;
240287 }
241288 case 'Finish' : {
289+ flushBatchedUpdates ( ) ;
242290 onFinish ( ) ;
243291 return ;
244292 }
@@ -247,7 +295,11 @@ async function streamFromResponse(
247295 }
248296 case 'UpdateConversation' : {
249297 currentMessages = event . conversation ;
250- dispatch ( { type : 'SET_MESSAGES' , payload : event . conversation } ) ;
298+ if ( ! reduceMotion ) {
299+ dispatch ( { type : 'SET_MESSAGES' , payload : event . conversation } ) ;
300+ } else {
301+ hasPendingUpdate = true ;
302+ }
251303 break ;
252304 }
253305 case 'Notification' : {
@@ -260,8 +312,10 @@ async function streamFromResponse(
260312 }
261313 }
262314
315+ flushBatchedUpdates ( ) ;
263316 onFinish ( ) ;
264317 } catch ( error ) {
318+ flushBatchedUpdates ( ) ;
265319 if ( error instanceof Error && error . name !== 'AbortError' ) {
266320 onFinish ( 'Stream error: ' + errorMessage ( error ) ) ;
267321 }
0 commit comments