Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 50 additions & 6 deletions examples/chat/python/src/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -500,7 +500,36 @@ async def emit_generated_surface(state: State) -> dict:
arr = json.loads(stripped)
jsonl = "\n".join(json.dumps(env) for env in arr) if isinstance(arr, list) else stripped
except json.JSONDecodeError:
arr = None
jsonl = payload # let the parser deal with malformed lines

# Reorder envelopes so beginRendering lands in position 2 (right
# after the first surfaceUpdate). The surface store gates surface
# materialization on beginRendering; emitting it early lets the
# frontend mount the (initially empty) surface and reveal per-
# component fallbacks while dataModelUpdate envelopes flow.
try:
if isinstance(arr, list):
surface_updates = [e for e in arr if isinstance(e, dict) and "surfaceUpdate" in e]
begin_renderings = [e for e in arr if isinstance(e, dict) and "beginRendering" in e]
data_updates = [e for e in arr if isinstance(e, dict) and "dataModelUpdate" in e]
others = [
e for e in arr
if isinstance(e, dict)
and not ("surfaceUpdate" in e or "beginRendering" in e or "dataModelUpdate" in e)
]
reordered = (
surface_updates
+ (begin_renderings[:1] if begin_renderings else [])
+ data_updates
+ others
+ begin_renderings[1:]
)
jsonl = "\n".join(json.dumps(env) for env in reordered)
except (TypeError, AttributeError, NameError):
# arr may be unbound or unexpected shape — fall back to existing jsonl.
pass

wrapped = A2UI_PREFIX + "\n" + jsonl + "\n"
elif tool_name == "generate_json_render_spec":
# json-render: classifier detects content starting with `{`, no
Expand All @@ -523,14 +552,29 @@ async def emit_generated_surface(state: State) -> dict:
# AIMessage as the final message; the chat composition's content
# classifier picks up the prefix and mounts <a2ui-surface> /
# <chat-generative-ui>.
# In-place replacement: return an AIMessage with the SAME id as the
# upstream tool-call AI. LangGraph's add_messages reducer matches by
# id and replaces, so the thread carries ONE AI message per GenUI
# turn (with both tool_calls AND the wrapped surface content)
# instead of two — the user sees a single bubble that transforms
# from skeleton to surface, not a skeleton bubble followed by a
# separate surface bubble.
out = []
placeholder_kwargs = {
"content": "rendered",
"tool_call_id": tool_msg.tool_call_id,
}
if getattr(tool_msg, "id", None):
out.append(ToolMessage(
id=tool_msg.id,
content="rendered",
tool_call_id=tool_msg.tool_call_id,
))
out.append(AIMessage(content=wrapped))
placeholder_kwargs["id"] = tool_msg.id
out.append(ToolMessage(**placeholder_kwargs))
replacement_kwargs = {"content": wrapped}
if ai_tool_call_msg is not None:
if getattr(ai_tool_call_msg, "id", None):
replacement_kwargs["id"] = ai_tool_call_msg.id
replacement_kwargs["tool_calls"] = ai_tool_call_msg.tool_calls
replacement_kwargs["additional_kwargs"] = ai_tool_call_msg.additional_kwargs or {}
replacement_kwargs["response_metadata"] = ai_tool_call_msg.response_metadata or {}
out.append(AIMessage(**replacement_kwargs))
return {"messages": out}


Expand Down
93 changes: 93 additions & 0 deletions examples/chat/python/tests/test_graph_smoke.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,3 +159,96 @@ def test_empty_string_returns_empty(self):

    def test_strips_leading_trailing_whitespace(self):
        """_slice_title trims surrounding whitespace from the title text."""
        assert _slice_title("  hello  ") == "hello"


import asyncio
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage


class TestEmitGeneratedSurfaceCoalescing:
    """Behavioral checks for emit_generated_surface's message coalescing.

    Covers two contracts of the GenUI emit node:
    1. the node replaces the upstream tool-call AIMessage in place by
       reusing its id (so add_messages merges rather than appends), and
    2. the wrapped A2UI envelope stream is reordered so beginRendering
       immediately follows the first surfaceUpdate.
    """

    def test_replaces_tool_call_ai_in_place_same_id(self):
        """emit_generated_surface returns an AIMessage with the same id
        as the upstream tool-call AI so add_messages replaces in-place."""
        from src.graph import emit_generated_surface

        # Upstream AI message that requested the GenUI tool call.
        upstream_ai = AIMessage(
            id="ai-1",
            content=[
                {"type": "function_call", "name": "generate_a2ui_schema",
                 "arguments": '{"request":"r"}'}
            ],
            tool_calls=[{
                "id": "call_1",
                "name": "generate_a2ui_schema",
                "args": {"request": "r"},
                "type": "tool_call",
            }],
        )
        # Tool result carrying the raw A2UI envelope array.
        surface_tool_msg = ToolMessage(
            tool_call_id="call_1",
            name="generate_a2ui_schema",
            content='[{"surfaceUpdate":{"surfaceId":"s1","components":[]}},'
                    '{"beginRendering":{"surfaceId":"s1","root":""}}]',
        )
        graph_state = {
            "messages": [
                HumanMessage(content="render a card"),
                upstream_ai,
                surface_tool_msg,
            ],
            "gen_ui_mode": "a2ui",
        }

        update = asyncio.run(emit_generated_surface(graph_state))

        # Expect TWO message updates: the tool placeholder + a replacement
        # AIMessage with the SAME id as the upstream tool-call AI.
        emitted = update["messages"]
        assert len(emitted) == 2
        merged_ai = [m for m in emitted if isinstance(m, AIMessage)][0]
        assert merged_ai.id == "ai-1", \
            "Replacement AI must reuse the upstream tool-call AI id for in-place merge"
        # Content carries the wrapped surface payload.
        assert "---a2ui_JSON---" in merged_ai.content
        # tool_calls is preserved so detection (frontend isGenuiTurn) still fires.
        call_names = [tc.get("name") for tc in merged_ai.tool_calls]
        assert "generate_a2ui_schema" in call_names

    def test_beginRendering_envelope_ordering(self):
        """emit reorders the wrapped envelopes so beginRendering lands
        before any dataModelUpdate envelopes."""
        import json

        from src.graph import emit_generated_surface

        upstream_ai = AIMessage(
            id="ai-2",
            content=[],
            tool_calls=[{
                "id": "call_2",
                "name": "generate_a2ui_schema",
                "args": {"request": "r"},
                "type": "tool_call",
            }],
        )
        # Deliberately puts beginRendering LAST so the node must move it.
        surface_tool_msg = ToolMessage(
            tool_call_id="call_2",
            name="generate_a2ui_schema",
            content='['
                    '{"surfaceUpdate":{"surfaceId":"s","components":[]}},'
                    '{"dataModelUpdate":{"surfaceId":"s","contents":[]}},'
                    '{"dataModelUpdate":{"surfaceId":"s","contents":[]}},'
                    '{"beginRendering":{"surfaceId":"s","root":""}}'
                    ']',
        )
        graph_state = {
            "messages": [HumanMessage(content="x"), upstream_ai, surface_tool_msg],
            "gen_ui_mode": "a2ui",
        }

        update = asyncio.run(emit_generated_surface(graph_state))
        merged_ai = [m for m in update["messages"] if isinstance(m, AIMessage)][0]

        # Strip prefix + grab JSONL lines.
        jsonl_body = merged_ai.content.split("---a2ui_JSON---\n", 1)[1].rstrip("\n")
        parsed = [json.loads(line) for line in jsonl_body.split("\n")]
        # First envelope = surfaceUpdate, SECOND = beginRendering, then dataModelUpdates.
        assert "surfaceUpdate" in parsed[0]
        assert "beginRendering" in parsed[1], \
            f"beginRendering should follow surfaceUpdate; got {list(parsed[1].keys())}"
        # The remaining dataModelUpdate envelopes follow.
        assert "dataModelUpdate" in parsed[2]
        assert "dataModelUpdate" in parsed[3]
Loading