@@ -187,7 +187,7 @@ def __init__(
 
         # Initialize the chat with the provided messages
         for msg in messages:
-            _utils.run_coro_sync(self.append_message(msg, scroll=False))
+            _utils.run_coro_sync(self.append_message(msg))
 
         # When user input is submitted, transform, and store it in the chat state
         # (and make sure this runs before other effects since when the user
@@ -399,7 +399,7 @@ def messages(
 
         return tuple(res)
 
-    async def append_message(self, message: Any, scroll: bool = True) -> None:
+    async def append_message(self, message: Any) -> None:
         """
         Append a message to the chat.
 
@@ -409,23 +409,16 @@ async def append_message(self, message: Any, scroll: bool = True) -> None:
             The message to append. A variety of message formats are supported including
             a string, a dictionary with `content` and `role` keys, or a relevant chat
             completion object from platforms like OpenAI, Anthropic, Ollama, and others.
-        scroll
-            Whether to scroll to the bottom of the chat after appending the message.
 
         Note
         ----
         Use `.append_message_stream()` instead of this method when `stream=True` (or
         similar) is specified in model's completion method.
         """
-        await self._append_message(message, scroll=scroll)
+        await self._append_message(message)
 
     async def _append_message(
-        self,
-        message: Any,
-        *,
-        chunk: ChunkOption = False,
-        stream_id: str | None = None,
-        scroll: bool = True,
+        self, message: Any, *, chunk: ChunkOption = False, stream_id: str | None = None
     ) -> None:
         # If currently we're in a stream, handle other messages (outside the stream) later
         if not self._can_append_message(stream_id):
@@ -455,11 +448,9 @@ async def _append_message(
         if msg is None:
             return
         msg = self._store_message(msg, chunk=chunk)
-        await self._send_append_message(msg, chunk=chunk, scroll=scroll)
+        await self._send_append_message(msg, chunk=chunk)
 
-    async def append_message_stream(
-        self, message: Iterable[Any] | AsyncIterable[Any], scroll: bool = True
-    ):
+    async def append_message_stream(self, message: Iterable[Any] | AsyncIterable[Any]):
         """
         Append a message as a stream of message chunks.
 
@@ -470,8 +461,6 @@ async def append_message_stream(
             message chunk formats are supported, including a string, a dictionary with
             `content` and `role` keys, or a relevant chat completion object from
             platforms like OpenAI, Anthropic, Ollama, and others.
-        scroll
-            Whether to scroll to the bottom of the chat while appending the message stream.
 
         Note
         ----
@@ -484,7 +473,7 @@ async def append_message_stream(
         # Run the stream in the background to get non-blocking behavior
         @reactive.extended_task
         async def _stream_task():
-            await self._append_message_stream(message, scroll=scroll)
+            await self._append_message_stream(message)
 
         _stream_task()
 
@@ -504,17 +493,17 @@ async def _handle_error():
         ctx.on_invalidate(_handle_error.destroy)
         self._effects.append(_handle_error)
 
-    async def _append_message_stream(self, message: AsyncIterable[Any], scroll: bool):
+    async def _append_message_stream(self, message: AsyncIterable[Any]):
         id = _utils.private_random_id()
 
         empty = ChatMessage(content="", role="assistant")
-        await self._append_message(empty, chunk="start", stream_id=id, scroll=scroll)
+        await self._append_message(empty, chunk="start", stream_id=id)
 
         try:
             async for msg in message:
-                await self._append_message(msg, chunk=True, stream_id=id, scroll=scroll)
+                await self._append_message(msg, chunk=True, stream_id=id)
         finally:
-            await self._append_message(empty, chunk="end", stream_id=id, scroll=scroll)
+            await self._append_message(empty, chunk="end", stream_id=id)
             await self._flush_pending_messages()
 
     async def _flush_pending_messages(self):
@@ -536,7 +525,6 @@ async def _send_append_message(
         self,
         message: StoredMessage,
         chunk: ChunkOption = False,
-        scroll: bool = True,
     ):
         if message["role"] == "system":
             # System messages are not displayed in the UI
@@ -561,7 +549,6 @@ async def _send_append_message(
             role=message["role"],
             content_type=content_type,
             chunk_type=chunk_type,
-            request_scroll=scroll,
         )
 
         # print(msg)
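
For context on how the updated API reads after dropping the `scroll` argument, here is a minimal usage sketch (assuming Shiny Express's `ui.Chat` component; `fake_stream` is a hypothetical stand-in for a real model's streaming completion):

    from shiny.express import ui

    # Create the chat component and render it with a starting message
    chat = ui.Chat(id="chat", messages=["Hello! How can I help?"])
    chat.ui()


    async def fake_stream(prompt: str):
        # Hypothetical stand-in for a streaming LLM response
        for word in f"You said: {prompt}".split():
            yield word + " "


    @chat.on_user_submit
    async def _():
        # No scroll= argument anymore; just append the message (or a stream of chunks)
        await chat.append_message_stream(fake_stream(chat.user_input()))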