Skip to content

Commit 4aa8481

Browse files
committed
Remove ability to control server-side (in the future, we may want this, but if we do in fact support it, we should have some sort of visual cue that there is overflow -- i.e., a scroll button)
1 parent b95bf81 commit 4aa8481

File tree

5 files changed

+25
-38
lines changed

5 files changed

+25
-38
lines changed

js/chat/chat.ts

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@ type Message = {
1616
role: "user" | "assistant";
1717
content_type: ContentType;
1818
chunk_type: "message_start" | "message_end" | null;
19-
request_scroll: boolean;
2019
};
2120
type ShinyChatMessage = {
2221
id: string;
@@ -80,14 +79,7 @@ class ChatMessage extends LightElement {
8079
updated(changedProperties: Map<string, unknown>): void {
8180
if (changedProperties.has("content")) {
8281
this.#highlightAndCodeCopy();
83-
84-
if (this.request_scroll) {
85-
const event = new CustomEvent("shiny-chat-request-scroll", {
86-
bubbles: true,
87-
composed: true,
88-
});
89-
this.dispatchEvent(event);
90-
}
82+
this.#requestScroll();
9183
}
9284
}
9385

@@ -117,6 +109,15 @@ class ChatMessage extends LightElement {
117109
});
118110
});
119111
}
112+
113+
#requestScroll(): void {
114+
this.dispatchEvent(
115+
new CustomEvent("shiny-chat-request-scroll", {
116+
bubbles: true,
117+
composed: true,
118+
})
119+
);
120+
}
120121
}
121122

122123
class ChatUserMessage extends LightElement {

shiny/ui/_chat.py

Lines changed: 11 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ def __init__(
187187

188188
# Initialize the chat with the provided messages
189189
for msg in messages:
190-
_utils.run_coro_sync(self.append_message(msg, scroll=False))
190+
_utils.run_coro_sync(self.append_message(msg))
191191

192192
# When user input is submitted, transform, and store it in the chat state
193193
# (and make sure this runs before other effects since when the user
@@ -399,7 +399,7 @@ def messages(
399399

400400
return tuple(res)
401401

402-
async def append_message(self, message: Any, scroll: bool = True) -> None:
402+
async def append_message(self, message: Any) -> None:
403403
"""
404404
Append a message to the chat.
405405
@@ -409,23 +409,16 @@ async def append_message(self, message: Any, scroll: bool = True) -> None:
409409
The message to append. A variety of message formats are supported including
410410
a string, a dictionary with `content` and `role` keys, or a relevant chat
411411
completion object from platforms like OpenAI, Anthropic, Ollama, and others.
412-
scroll
413-
Whether to scroll to the bottom of the chat after appending the message.
414412
415413
Note
416414
----
417415
Use `.append_message_stream()` instead of this method when `stream=True` (or
418416
similar) is specified in model's completion method.
419417
"""
420-
await self._append_message(message, scroll=scroll)
418+
await self._append_message(message)
421419

422420
async def _append_message(
423-
self,
424-
message: Any,
425-
*,
426-
chunk: ChunkOption = False,
427-
stream_id: str | None = None,
428-
scroll: bool = True,
421+
self, message: Any, *, chunk: ChunkOption = False, stream_id: str | None = None
429422
) -> None:
430423
# If currently we're in a stream, handle other messages (outside the stream) later
431424
if not self._can_append_message(stream_id):
@@ -455,11 +448,9 @@ async def _append_message(
455448
if msg is None:
456449
return
457450
msg = self._store_message(msg, chunk=chunk)
458-
await self._send_append_message(msg, chunk=chunk, scroll=scroll)
451+
await self._send_append_message(msg, chunk=chunk)
459452

460-
async def append_message_stream(
461-
self, message: Iterable[Any] | AsyncIterable[Any], scroll: bool = True
462-
):
453+
async def append_message_stream(self, message: Iterable[Any] | AsyncIterable[Any]):
463454
"""
464455
Append a message as a stream of message chunks.
465456
@@ -470,8 +461,6 @@ async def append_message_stream(
470461
message chunk formats are supported, including a string, a dictionary with
471462
`content` and `role` keys, or a relevant chat completion object from
472463
platforms like OpenAI, Anthropic, Ollama, and others.
473-
scroll
474-
Whether to scroll to the bottom of the chat while appending the message stream.
475464
476465
Note
477466
----
@@ -484,7 +473,7 @@ async def append_message_stream(
484473
# Run the stream in the background to get non-blocking behavior
485474
@reactive.extended_task
486475
async def _stream_task():
487-
await self._append_message_stream(message, scroll=scroll)
476+
await self._append_message_stream(message)
488477

489478
_stream_task()
490479

@@ -504,17 +493,17 @@ async def _handle_error():
504493
ctx.on_invalidate(_handle_error.destroy)
505494
self._effects.append(_handle_error)
506495

507-
async def _append_message_stream(self, message: AsyncIterable[Any], scroll: bool):
496+
async def _append_message_stream(self, message: AsyncIterable[Any]):
508497
id = _utils.private_random_id()
509498

510499
empty = ChatMessage(content="", role="assistant")
511-
await self._append_message(empty, chunk="start", stream_id=id, scroll=scroll)
500+
await self._append_message(empty, chunk="start", stream_id=id)
512501

513502
try:
514503
async for msg in message:
515-
await self._append_message(msg, chunk=True, stream_id=id, scroll=scroll)
504+
await self._append_message(msg, chunk=True, stream_id=id)
516505
finally:
517-
await self._append_message(empty, chunk="end", stream_id=id, scroll=scroll)
506+
await self._append_message(empty, chunk="end", stream_id=id)
518507
await self._flush_pending_messages()
519508

520509
async def _flush_pending_messages(self):
@@ -536,7 +525,6 @@ async def _send_append_message(
536525
self,
537526
message: StoredMessage,
538527
chunk: ChunkOption = False,
539-
scroll: bool = True,
540528
):
541529
if message["role"] == "system":
542530
# System messages are not displayed in the UI
@@ -561,7 +549,6 @@ async def _send_append_message(
561549
role=message["role"],
562550
content_type=content_type,
563551
chunk_type=chunk_type,
564-
request_scroll=scroll,
565552
)
566553

567554
# print(msg)

shiny/ui/_chat_types.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,4 +31,3 @@ class StoredMessage(TransformedMessage):
3131
class ClientMessage(ChatMessage):
3232
content_type: Literal["markdown", "html"]
3333
chunk_type: Literal["message_start", "message_end"] | None
34-
request_scroll: bool

shiny/www/py-shiny/chat/chat.js

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

shiny/www/py-shiny/chat/chat.js.map

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)