mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-05-07 18:19:00 +00:00
refactor: extract a2a into standalone crewai-a2a package
Move crewai.a2a into lib/crewai-a2a as its own workspace package, importable as crewai_a2a. The crewai[a2a] extra now pulls in crewai-a2a, which owns a2a-sdk, httpx-auth, httpx-sse, and aiocache. crewai.a2a stays importable. Its __init__ is a compat shim that installs a meta-path finder routing crewai.a2a.* to crewai_a2a.*, so existing user code keeps working untouched. a2a tests and cassettes moved alongside the package under lib/crewai-a2a/tests/. Added that path to the mypy and ruff per-file-ignores lists to match the other test dirs.
This commit is contained in:
@@ -0,0 +1,321 @@
|
||||
"""Cross-validate A2UI Pydantic models against vendored JSON schemas.
|
||||
|
||||
Ensures the two validation sources stay in sync: representative payloads
|
||||
must be accepted or rejected consistently by both the Pydantic models and
|
||||
the JSON schemas.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from crewai.a2a.extensions.a2ui import catalog
|
||||
from crewai.a2a.extensions.a2ui.models import A2UIEvent, A2UIMessage
|
||||
from crewai.a2a.extensions.a2ui.schema import load_schema
|
||||
import jsonschema
|
||||
import pytest
|
||||
|
||||
|
||||
SERVER_SCHEMA = load_schema("server_to_client")
|
||||
CLIENT_SCHEMA = load_schema("client_to_server")
|
||||
CATALOG_SCHEMA = load_schema("standard_catalog_definition")
|
||||
|
||||
|
||||
def _json_schema_valid(schema: dict[str, Any], instance: dict[str, Any]) -> bool:
    """Check *instance* against *schema*, reporting validity as a bool.

    Only `jsonschema.ValidationError` counts as "invalid"; schema errors
    (a malformed schema) still propagate so broken fixtures fail loudly.
    """
    try:
        jsonschema.validate(instance, schema)
    except jsonschema.ValidationError:
        return False
    return True
|
||||
|
||||
|
||||
def _pydantic_valid_message(data: dict[str, Any]) -> bool:
    """Check whether *data* parses as an ``A2UIMessage``.

    Any exception (not just pydantic's ValidationError) is treated as a
    rejection, mirroring how the conformance tests use this helper.
    """
    try:
        A2UIMessage.model_validate(data)
    except Exception:
        return False
    return True
|
||||
|
||||
|
||||
def _pydantic_valid_event(data: dict[str, Any]) -> bool:
    """Check whether *data* parses as an ``A2UIEvent``.

    Any exception (not just pydantic's ValidationError) is treated as a
    rejection, mirroring how the conformance tests use this helper.
    """
    try:
        A2UIEvent.model_validate(data)
    except Exception:
        return False
    return True
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Valid server-to-client payloads
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Representative payloads that BOTH the JSON schema and the Pydantic models
# must accept. Each dict carries exactly one message-type key.
VALID_SERVER_MESSAGES: list[dict[str, Any]] = [
    # beginRendering with only its required fields (surfaceId + root).
    {
        "beginRendering": {
            "surfaceId": "s1",
            "root": "root-col",
        },
    },
    # beginRendering with the optional catalogId and styles fields populated.
    {
        "beginRendering": {
            "surfaceId": "s2",
            "root": "root-col",
            "catalogId": "standard (v0.8)",
            "styles": {"primaryColor": "#FF0000", "font": "Roboto"},
        },
    },
    # surfaceUpdate with a single Text component.
    {
        "surfaceUpdate": {
            "surfaceId": "s1",
            "components": [
                {
                    "id": "title",
                    "component": {
                        "Text": {"text": {"literalString": "Hello"}},
                    },
                },
            ],
        },
    },
    # surfaceUpdate exercising the optional per-component "weight" field.
    {
        "surfaceUpdate": {
            "surfaceId": "s1",
            "components": [
                {
                    "id": "weighted",
                    "weight": 2.0,
                    "component": {
                        "Column": {
                            "children": {"explicitList": ["a", "b"]},
                        },
                    },
                },
            ],
        },
    },
    # dataModelUpdate covering the scalar value variants (string/number/bool).
    {
        "dataModelUpdate": {
            "surfaceId": "s1",
            "contents": [
                {"key": "name", "valueString": "Alice"},
                {"key": "score", "valueNumber": 42},
                {"key": "active", "valueBoolean": True},
            ],
        },
    },
    # dataModelUpdate with an explicit path and a nested valueMap entry.
    {
        "dataModelUpdate": {
            "surfaceId": "s1",
            "path": "/user",
            "contents": [
                {
                    "key": "prefs",
                    "valueMap": [
                        {"key": "theme", "valueString": "dark"},
                    ],
                },
            ],
        },
    },
    # deleteSurface: the simplest message, just a surfaceId.
    {
        "deleteSurface": {"surfaceId": "s1"},
    },
]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Invalid server-to-client payloads
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Payloads the Pydantic models must REJECT; each entry breaks the message
# contract in a different way.
INVALID_SERVER_MESSAGES: list[dict[str, Any]] = [
    # Empty object: no message-type key at all.
    {},
    # beginRendering missing its required "root" field.
    {"beginRendering": {"surfaceId": "s1"}},
    # surfaceUpdate with an empty components list (must be non-empty).
    {"surfaceUpdate": {"surfaceId": "s1", "components": []}},
    # Two message-type keys at once; exactly one is allowed.
    {
        "beginRendering": {"surfaceId": "s1", "root": "r"},
        "deleteSurface": {"surfaceId": "s1"},
    },
    # Unrecognized message-type key.
    {"unknownType": {"surfaceId": "s1"}},
]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Valid client-to-server payloads
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Representative client-to-server events that both validators must accept.
VALID_CLIENT_EVENTS: list[dict[str, Any]] = [
    # userAction with an empty context.
    {
        "userAction": {
            "name": "click",
            "surfaceId": "s1",
            "sourceComponentId": "btn-1",
            "timestamp": "2026-03-12T10:00:00Z",
            "context": {},
        },
    },
    # userAction carrying contextual form data.
    {
        "userAction": {
            "name": "submit",
            "surfaceId": "s1",
            "sourceComponentId": "btn-2",
            "timestamp": "2026-03-12T10:00:00Z",
            "context": {"field": "value"},
        },
    },
    # error event reported by the client.
    {
        "error": {"message": "render failed", "code": 500},
    },
]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Invalid client-to-server payloads
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Client events the Pydantic models must REJECT.
INVALID_CLIENT_EVENTS: list[dict[str, Any]] = [
    # Empty object: no event-type key.
    {},
    # userAction missing surfaceId/sourceComponentId/timestamp/context.
    {"userAction": {"name": "click"}},
    # Both userAction and error present; exactly one event type is allowed.
    {
        "userAction": {
            "name": "click",
            "surfaceId": "s1",
            "sourceComponentId": "btn-1",
            "timestamp": "2026-03-12T10:00:00Z",
            "context": {},
        },
        "error": {"message": "oops"},
    },
]
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Catalog component payloads (validated structurally)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# One representative valid property-set per standard-catalog component,
# keyed by component name. Used both for catalog-schema validation and for
# validating against the matching Pydantic model in `catalog`.
VALID_COMPONENTS: dict[str, dict[str, Any]] = {
    # Content components.
    "Text": {"text": {"literalString": "hello"}, "usageHint": "h1"},
    "Image": {"url": {"path": "/img/url"}, "fit": "cover", "usageHint": "avatar"},
    "Icon": {"name": {"literalString": "home"}},
    "Video": {"url": {"literalString": "https://example.com/video.mp4"}},
    "AudioPlayer": {"url": {"literalString": "https://example.com/audio.mp3"}},
    # Layout components — children may be an explicitList or a data-bound template.
    "Row": {"children": {"explicitList": ["a", "b"]}, "distribution": "center"},
    "Column": {"children": {"template": {"componentId": "c1", "dataBinding": "/list"}}},
    "List": {"children": {"explicitList": ["x"]}, "direction": "horizontal"},
    "Card": {"child": "inner"},
    "Tabs": {"tabItems": [{"title": {"literalString": "Tab 1"}, "child": "content"}]},
    "Divider": {"axis": "horizontal"},
    "Modal": {"entryPointChild": "trigger", "contentChild": "body"},
    # Interactive components.
    "Button": {"child": "label", "action": {"name": "go"}},
    "CheckBox": {
        "label": {"literalString": "Accept"},
        "value": {"literalBoolean": False},
    },
    "TextField": {"label": {"literalString": "Name"}},
    "DateTimeInput": {"value": {"path": "/date"}},
    "MultipleChoice": {
        "selections": {"literalArray": ["a"]},
        "options": [{"label": {"literalString": "A"}, "value": "a"}],
    },
    "Slider": {"value": {"literalNumber": 50}, "minValue": 0, "maxValue": 100},
}
|
||||
|
||||
|
||||
class TestServerToClientConformance:
    """Pydantic models and JSON schema must agree on server-to-client messages."""

    @pytest.mark.parametrize("payload", VALID_SERVER_MESSAGES)
    def test_valid_accepted_by_both(self, payload: dict[str, Any]) -> None:
        """Every valid payload must pass BOTH validators, not just one."""
        assert _json_schema_valid(SERVER_SCHEMA, payload), (
            f"JSON schema rejected valid payload: {payload}"
        )
        assert _pydantic_valid_message(payload), (
            f"Pydantic rejected valid payload: {payload}"
        )

    @pytest.mark.parametrize("payload", INVALID_SERVER_MESSAGES)
    def test_invalid_rejected_by_pydantic(self, payload: dict[str, Any]) -> None:
        """Invalid payloads must be rejected by the Pydantic models.

        Only Pydantic is checked here; the JSON schema side is not asserted
        for invalid payloads.
        """
        assert not _pydantic_valid_message(payload), (
            f"Pydantic accepted invalid payload: {payload}"
        )
|
||||
|
||||
|
||||
class TestClientToServerConformance:
    """Pydantic models and JSON schema must agree on client-to-server events."""

    @pytest.mark.parametrize("payload", VALID_CLIENT_EVENTS)
    def test_valid_accepted_by_both(self, payload: dict[str, Any]) -> None:
        """Every valid event must pass BOTH validators, not just one."""
        assert _json_schema_valid(CLIENT_SCHEMA, payload), (
            f"JSON schema rejected valid payload: {payload}"
        )
        assert _pydantic_valid_event(payload), (
            f"Pydantic rejected valid payload: {payload}"
        )

    @pytest.mark.parametrize("payload", INVALID_CLIENT_EVENTS)
    def test_invalid_rejected_by_pydantic(self, payload: dict[str, Any]) -> None:
        """Invalid events must be rejected by the Pydantic models.

        Only Pydantic is checked here; the JSON schema side is not asserted
        for invalid payloads.
        """
        assert not _pydantic_valid_event(payload), (
            f"Pydantic accepted invalid payload: {payload}"
        )
|
||||
|
||||
|
||||
class TestCatalogConformance:
    """Catalog component schemas and Pydantic models must define the same components."""

    def test_catalog_component_names_match(self) -> None:
        """The catalog schema and STANDARD_CATALOG_COMPONENTS list the same names."""
        from crewai.a2a.extensions.a2ui.catalog import STANDARD_CATALOG_COMPONENTS

        schema_components = set(CATALOG_SCHEMA["components"].keys())
        assert schema_components == STANDARD_CATALOG_COMPONENTS

    @pytest.mark.parametrize(
        "name,props",
        list(VALID_COMPONENTS.items()),
    )
    def test_valid_component_accepted_by_catalog_schema(
        self, name: str, props: dict[str, Any]
    ) -> None:
        """Each representative property-set validates against its component schema."""
        component_schema = CATALOG_SCHEMA["components"][name]
        assert _json_schema_valid(component_schema, props), (
            f"Catalog schema rejected valid {name}: {props}"
        )

    @pytest.mark.parametrize(
        "name,props",
        list(VALID_COMPONENTS.items()),
    )
    def test_valid_component_accepted_by_pydantic(
        self, name: str, props: dict[str, Any]
    ) -> None:
        """Each representative property-set validates against its Pydantic model.

        Model classes are looked up by name on the ``catalog`` module, so the
        component-name match test above is a prerequisite for this one.
        """
        model_cls = getattr(catalog, name)
        try:
            model_cls.model_validate(props)
        except Exception as exc:
            pytest.fail(f"Pydantic {name} rejected valid props: {exc}")

    def test_catalog_required_fields_match(self) -> None:
        """Required fields in the JSON schema match non-optional Pydantic fields."""
        for comp_name, comp_schema in CATALOG_SCHEMA["components"].items():
            schema_required = set(comp_schema.get("required", []))
            model_cls = getattr(catalog, comp_name)
            # Compare against the wire name: the field alias when one is set,
            # otherwise the Python attribute name.
            pydantic_required = {
                info.alias or field_name
                for field_name, info in model_cls.model_fields.items()
                if info.is_required()
            }
            assert schema_required == pydantic_required, (
                f"{comp_name}: schema requires {schema_required}, "
                f"Pydantic requires {pydantic_required}"
            )

    def test_catalog_fields_match(self) -> None:
        """Field names in JSON schema match Pydantic model aliases."""
        for comp_name, comp_schema in CATALOG_SCHEMA["components"].items():
            schema_fields = set(comp_schema.get("properties", {}).keys())
            model_cls = getattr(catalog, comp_name)
            # Same alias-or-name mapping as the required-fields test above.
            pydantic_fields = {
                info.alias or field_name
                for field_name, info in model_cls.model_fields.items()
            }
            assert schema_fields == pydantic_fields, (
                f"{comp_name}: schema has {schema_fields}, "
                f"Pydantic has {pydantic_fields}"
            )
|
||||
329
lib/crewai-a2a/tests/a2a/test_a2a_integration.py
Normal file
329
lib/crewai-a2a/tests/a2a/test_a2a_integration.py
Normal file
@@ -0,0 +1,329 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from a2a.client import ClientFactory
|
||||
from a2a.types import AgentCard, Message, Part, Role, Task, TaskState, TextPart
|
||||
from crewai.a2a.updates.polling.handler import PollingHandler
|
||||
from crewai.a2a.updates.streaming.handler import StreamingHandler
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
|
||||
|
||||
A2A_TEST_ENDPOINT = os.getenv("A2A_TEST_ENDPOINT", "http://localhost:9999")
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def a2a_client():
    """Create A2A client for test server.

    Connects to A2A_TEST_ENDPOINT (default http://localhost:9999) and closes
    the client after the test, even if the test fails is NOT guaranteed here —
    NOTE(review): close() only runs if the test returns control normally;
    consider try/finally if teardown must be unconditional.
    """
    client = await ClientFactory.connect(A2A_TEST_ENDPOINT)
    yield client
    await client.close()
|
||||
|
||||
|
||||
@pytest.fixture
def test_message() -> Message:
    """Build a minimal user message asking a trivial math question."""
    text_part = Part(root=TextPart(text="What is 2 + 2?"))
    return Message(
        role=Role.user,
        parts=[text_part],
        message_id=str(uuid.uuid4()),
    )
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
async def agent_card(a2a_client) -> AgentCard:
    """Fetch the real agent card from the server (via the a2a_client fixture)."""
    return await a2a_client.get_card()
|
||||
|
||||
|
||||
class TestA2AAgentCardFetching:
    """Integration tests for agent card fetching."""

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_fetch_agent_card(self, a2a_client) -> None:
        """Test fetching an agent card from the server.

        Expectations ("GPT Assistant", streaming=True) are pinned to the
        recorded VCR cassette for this test.
        """
        card = await a2a_client.get_card()

        assert card is not None
        assert card.name == "GPT Assistant"
        assert card.url is not None
        assert card.capabilities is not None
        assert card.capabilities.streaming is True
|
||||
|
||||
|
||||
class TestA2APollingIntegration:
    """Integration tests for A2A polling handler."""

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_polling_completes_task(
        self,
        a2a_client,
        test_message: Message,
        agent_card: AgentCard,
    ) -> None:
        """Test that polling handler completes a task successfully.

        Runs against the recorded cassette; the "4" check pins the answer to
        the "What is 2 + 2?" message from the test_message fixture.
        """
        new_messages: list[Message] = []

        result = await PollingHandler.execute(
            client=a2a_client,
            message=test_message,
            new_messages=new_messages,
            agent_card=agent_card,
            polling_interval=0.5,
            polling_timeout=30.0,
        )

        assert isinstance(result, dict)
        assert result["status"] == TaskState.completed
        assert result.get("result") is not None
        assert "4" in result["result"]
|
||||
|
||||
|
||||
class TestA2AStreamingIntegration:
    """Integration tests for A2A streaming handler."""

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_streaming_completes_task(
        self,
        a2a_client,
        test_message: Message,
        agent_card: AgentCard,
    ) -> None:
        """Test that streaming handler completes a task successfully.

        Unlike the polling test, no exact answer is asserted — only that a
        non-None result with completed status comes back.
        """
        new_messages: list[Message] = []

        result = await StreamingHandler.execute(
            client=a2a_client,
            message=test_message,
            new_messages=new_messages,
            agent_card=agent_card,
            endpoint=agent_card.url,
        )

        assert isinstance(result, dict)
        assert result["status"] == TaskState.completed
        assert result.get("result") is not None
|
||||
|
||||
|
||||
class TestA2ATaskOperations:
    """Integration tests for task operations."""

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_send_message_and_get_response(
        self,
        a2a_client,
        test_message: Message,
    ) -> None:
        """Test sending a message and getting a response.

        Iterates the client's event stream, remembering the last Task seen,
        then asserts the final task completed.
        """
        from a2a.types import Task

        final_task: Task | None = None
        async for event in a2a_client.send_message(test_message):
            # Events of interest are (task, update) pairs. Require exactly
            # two elements before unpacking: the previous `len(event) >= 1`
            # guard let a 1-tuple through and the unpack raised ValueError.
            if isinstance(event, tuple) and len(event) == 2:
                task, _ = event
                if isinstance(task, Task):
                    final_task = task

        assert final_task is not None
        assert final_task.id is not None
        assert final_task.status is not None
        assert final_task.status.state == TaskState.completed
|
||||
|
||||
|
||||
class TestA2APushNotificationHandler:
    """Tests for push notification handler.

    These tests use mocks for the result store since webhook callbacks
    are incoming requests that can't be recorded with VCR.
    """

    @pytest.fixture
    def mock_agent_card(self) -> AgentCard:
        """Create a minimal valid agent card for testing."""
        from a2a.types import AgentCapabilities

        return AgentCard(
            name="Test Agent",
            description="Test agent for push notification tests",
            url="http://localhost:9999",
            version="1.0.0",
            capabilities=AgentCapabilities(streaming=True, push_notifications=True),
            default_input_modes=["text"],
            default_output_modes=["text"],
            skills=[],
        )

    @pytest.fixture
    def mock_task(self) -> Task:
        """Create a minimal valid task for testing (state: working)."""
        from a2a.types import Task, TaskStatus

        return Task(
            id="task-123",
            context_id="ctx-123",
            status=TaskStatus(state=TaskState.working),
        )

    @pytest.mark.asyncio
    async def test_push_handler_waits_for_result(
        self,
        mock_agent_card: AgentCard,
        mock_task,
    ) -> None:
        """Test that push handler waits for result from store."""
        from unittest.mock import AsyncMock, MagicMock

        from a2a.types import Task, TaskStatus
        from crewai.a2a.updates.push_notifications.config import PushNotificationConfig
        from crewai.a2a.updates.push_notifications.handler import (
            PushNotificationHandler,
        )
        from pydantic import AnyHttpUrl

        # The task the result store will hand back once "notified".
        completed_task = Task(
            id="task-123",
            context_id="ctx-123",
            status=TaskStatus(state=TaskState.completed),
            history=[],
        )

        mock_store = MagicMock()
        mock_store.wait_for_result = AsyncMock(return_value=completed_task)

        # send_message must be an async generator yielding (task, update)
        # pairs — a plain AsyncMock would not be iterable with `async for`.
        async def mock_send_message(*args, **kwargs):
            yield (mock_task, None)

        mock_client = MagicMock()
        mock_client.send_message = mock_send_message

        config = PushNotificationConfig(
            url=AnyHttpUrl("http://localhost:8080/a2a/callback"),
            token="secret-token",
            result_store=mock_store,
        )

        test_msg = Message(
            role=Role.user,
            parts=[Part(root=TextPart(text="What is 2+2?"))],
            message_id="msg-001",
        )

        new_messages: list[Message] = []

        result = await PushNotificationHandler.execute(
            client=mock_client,
            message=test_msg,
            new_messages=new_messages,
            agent_card=mock_agent_card,
            config=config,
            result_store=mock_store,
            polling_timeout=30.0,
            polling_interval=1.0,
            endpoint=mock_agent_card.url,
        )

        # The handler must delegate the wait to the store with the exact
        # timeout/interval it was configured with.
        mock_store.wait_for_result.assert_called_once_with(
            task_id="task-123",
            timeout=30.0,
            poll_interval=1.0,
        )

        assert result["status"] == TaskState.completed

    @pytest.mark.asyncio
    async def test_push_handler_returns_failure_on_timeout(
        self,
        mock_agent_card: AgentCard,
    ) -> None:
        """Test that push handler returns failure when result store times out."""
        from unittest.mock import AsyncMock, MagicMock

        from a2a.types import Task, TaskStatus
        from crewai.a2a.updates.push_notifications.config import PushNotificationConfig
        from crewai.a2a.updates.push_notifications.handler import (
            PushNotificationHandler,
        )
        from pydantic import AnyHttpUrl

        # wait_for_result returning None models a store timeout.
        mock_store = MagicMock()
        mock_store.wait_for_result = AsyncMock(return_value=None)

        working_task = Task(
            id="task-456",
            context_id="ctx-456",
            status=TaskStatus(state=TaskState.working),
        )

        async def mock_send_message(*args, **kwargs):
            yield (working_task, None)

        mock_client = MagicMock()
        mock_client.send_message = mock_send_message

        config = PushNotificationConfig(
            url=AnyHttpUrl("http://localhost:8080/a2a/callback"),
            token="token",
            result_store=mock_store,
        )

        test_msg = Message(
            role=Role.user,
            parts=[Part(root=TextPart(text="test"))],
            message_id="msg-002",
        )

        new_messages: list[Message] = []

        result = await PushNotificationHandler.execute(
            client=mock_client,
            message=test_msg,
            new_messages=new_messages,
            agent_card=mock_agent_card,
            config=config,
            result_store=mock_store,
            polling_timeout=5.0,
            polling_interval=0.5,
            endpoint=mock_agent_card.url,
        )

        # Failure surface: failed status plus an error message mentioning
        # the timeout.
        assert result["status"] == TaskState.failed
        assert "timeout" in result.get("error", "").lower()

    @pytest.mark.asyncio
    async def test_push_handler_requires_config(
        self,
        mock_agent_card: AgentCard,
    ) -> None:
        """Test that push handler fails gracefully without config."""
        from unittest.mock import MagicMock

        from crewai.a2a.updates.push_notifications.handler import (
            PushNotificationHandler,
        )

        mock_client = MagicMock()

        test_msg = Message(
            role=Role.user,
            parts=[Part(root=TextPart(text="test"))],
            message_id="msg-003",
        )

        new_messages: list[Message] = []

        # No config / result_store passed: the handler should not raise but
        # report a failed status explaining the missing config.
        result = await PushNotificationHandler.execute(
            client=mock_client,
            message=test_msg,
            new_messages=new_messages,
            agent_card=mock_agent_card,
            endpoint=mock_agent_card.url,
        )

        assert result["status"] == TaskState.failed
        assert "config" in result.get("error", "").lower()
|
||||
324
lib/crewai-a2a/tests/a2a/utils/test_agent_card.py
Normal file
324
lib/crewai-a2a/tests/a2a/utils/test_agent_card.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""Tests for A2A agent card utilities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from a2a.types import AgentCard, AgentSkill
|
||||
from crewai import Agent
|
||||
from crewai.a2a.config import A2AClientConfig, A2AServerConfig
|
||||
from crewai.a2a.utils.agent_card import inject_a2a_server_methods
|
||||
|
||||
|
||||
class TestInjectA2AServerMethods:
    """Tests for inject_a2a_server_methods function."""

    def test_agent_with_server_config_gets_to_agent_card_method(self) -> None:
        """Agent with A2AServerConfig should have to_agent_card method injected."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        assert hasattr(agent, "to_agent_card")
        assert callable(agent.to_agent_card)

    def test_agent_without_server_config_no_injection(self) -> None:
        """Agent without A2AServerConfig should not get to_agent_card method."""
        # A client-only config must NOT trigger server-method injection.
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AClientConfig(endpoint="http://example.com"),
        )

        assert not hasattr(agent, "to_agent_card")

    def test_agent_without_a2a_no_injection(self) -> None:
        """Agent without any a2a config should not get to_agent_card method."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
        )

        assert not hasattr(agent, "to_agent_card")

    def test_agent_with_mixed_configs_gets_injection(self) -> None:
        """Agent with list containing A2AServerConfig should get to_agent_card."""
        # One server config anywhere in the list is enough to inject.
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=[
                A2AClientConfig(endpoint="http://example.com"),
                A2AServerConfig(name="My Agent"),
            ],
        )

        assert hasattr(agent, "to_agent_card")
        assert callable(agent.to_agent_card)

    def test_manual_injection_on_plain_agent(self) -> None:
        """inject_a2a_server_methods should work when called manually."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
        )
        # Manually set server config and inject; object.__setattr__ bypasses
        # the model's own attribute handling.
        object.__setattr__(agent, "a2a", A2AServerConfig())
        inject_a2a_server_methods(agent)

        assert hasattr(agent, "to_agent_card")
        assert callable(agent.to_agent_card)
|
||||
|
||||
|
||||
class TestToAgentCard:
    """Tests for the injected to_agent_card method."""

    def test_returns_agent_card(self) -> None:
        """to_agent_card should return an AgentCard instance."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert isinstance(card, AgentCard)

    def test_uses_agent_role_as_name(self) -> None:
        """AgentCard name should default to agent role."""
        agent = Agent(
            role="Data Analyst",
            goal="Analyze data",
            backstory="Expert analyst",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert card.name == "Data Analyst"

    def test_uses_server_config_name(self) -> None:
        """AgentCard name should prefer A2AServerConfig.name over role."""
        agent = Agent(
            role="Data Analyst",
            goal="Analyze data",
            backstory="Expert analyst",
            a2a=A2AServerConfig(name="Custom Agent Name"),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert card.name == "Custom Agent Name"

    def test_uses_goal_as_description(self) -> None:
        """AgentCard description should include agent goal."""
        agent = Agent(
            role="Test Agent",
            goal="Accomplish important tasks",
            backstory="Has extensive experience",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")

        # Substring check only — the description may embed more than the goal.
        assert "Accomplish important tasks" in card.description

    def test_uses_server_config_description(self) -> None:
        """AgentCard description should prefer A2AServerConfig.description."""
        agent = Agent(
            role="Test Agent",
            goal="Accomplish important tasks",
            backstory="Has extensive experience",
            a2a=A2AServerConfig(description="Custom description"),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert card.description == "Custom description"

    def test_uses_provided_url(self) -> None:
        """AgentCard url should use the provided URL."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://my-server.com:9000")

        assert card.url == "http://my-server.com:9000"

    def test_uses_server_config_url(self) -> None:
        """AgentCard url should prefer A2AServerConfig.url over provided URL."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(url="http://configured-url.com"),
        )

        card = agent.to_agent_card("http://fallback-url.com")

        # NOTE(review): config-sourced URLs gain a trailing slash (pydantic
        # URL normalization, presumably) while the provided-URL path above
        # does not — confirm this asymmetry is intended.
        assert card.url == "http://configured-url.com/"

    def test_generates_default_skill(self) -> None:
        """AgentCard should have at least one skill based on agent role."""
        agent = Agent(
            role="Research Assistant",
            goal="Help with research",
            backstory="Skilled researcher",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert len(card.skills) >= 1
        skill = card.skills[0]
        assert skill.name == "Research Assistant"
        assert skill.description == "Help with research"

    def test_uses_server_config_skills(self) -> None:
        """AgentCard skills should prefer A2AServerConfig.skills."""
        custom_skill = AgentSkill(
            id="custom-skill",
            name="Custom Skill",
            description="A custom skill",
            tags=["custom"],
        )
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(skills=[custom_skill]),
        )

        card = agent.to_agent_card("http://localhost:8000")

        # Configured skills replace (not extend) the default role-based skill.
        assert len(card.skills) == 1
        assert card.skills[0].id == "custom-skill"
        assert card.skills[0].name == "Custom Skill"

    def test_includes_custom_version(self) -> None:
        """AgentCard should include version from A2AServerConfig."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(version="2.0.0"),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert card.version == "2.0.0"

    def test_default_version(self) -> None:
        """AgentCard should have default version 1.0.0."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")

        assert card.version == "1.0.0"
|
||||
|
||||
|
||||
class TestAgentCardJsonStructure:
    """Tests for the JSON structure of AgentCard."""

    def test_json_has_required_fields(self) -> None:
        """AgentCard JSON should contain all required A2A protocol fields."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")
        json_data = card.model_dump()

        assert "name" in json_data
        assert "description" in json_data
        assert "url" in json_data
        assert "version" in json_data
        assert "skills" in json_data
        assert "capabilities" in json_data
        # camelCase keys: model_dump() emits the wire aliases here.
        assert "defaultInputModes" in json_data
        assert "defaultOutputModes" in json_data

    def test_json_skills_structure(self) -> None:
        """Each skill in JSON should have required fields."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")
        json_data = card.model_dump()

        assert len(json_data["skills"]) >= 1
        skill = json_data["skills"][0]
        assert "id" in skill
        assert "name" in skill
        assert "description" in skill
        assert "tags" in skill

    def test_json_capabilities_structure(self) -> None:
        """Capabilities in JSON should have expected fields."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")
        json_data = card.model_dump()

        capabilities = json_data["capabilities"]
        assert "streaming" in capabilities
        assert "pushNotifications" in capabilities

    def test_json_serializable(self) -> None:
        """AgentCard should be JSON serializable."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")
        json_str = card.model_dump_json()

        assert isinstance(json_str, str)
        assert "Test Agent" in json_str
        assert "http://localhost:8000" in json_str

    def test_json_excludes_none_values(self) -> None:
        """AgentCard JSON with exclude_none should omit None fields."""
        agent = Agent(
            role="Test Agent",
            goal="Test goal",
            backstory="Test backstory",
            a2a=A2AServerConfig(),
        )

        card = agent.to_agent_card("http://localhost:8000")
        json_data = card.model_dump(exclude_none=True)

        # Optional card fields left unset must not appear at all.
        assert "provider" not in json_data
        assert "documentationUrl" not in json_data
        assert "iconUrl" not in json_data
|
||||
379
lib/crewai-a2a/tests/a2a/utils/test_task.py
Normal file
379
lib/crewai-a2a/tests/a2a/utils/test_task.py
Normal file
@@ -0,0 +1,379 @@
|
||||
"""Tests for A2A task utilities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from a2a.server.agent_execution import RequestContext
|
||||
from a2a.server.events import EventQueue
|
||||
from a2a.types import Message, Task as A2ATask, TaskState, TaskStatus
|
||||
from crewai.a2a.utils.task import cancel, cancellable, execute
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_agent() -> MagicMock:
|
||||
"""Create a mock CrewAI agent."""
|
||||
agent = MagicMock()
|
||||
agent.role = "Test Agent"
|
||||
agent.tools = []
|
||||
agent.aexecute_task = AsyncMock(return_value="Task completed successfully")
|
||||
return agent
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_task(mock_context: MagicMock) -> MagicMock:
|
||||
"""Create a mock Task."""
|
||||
task = MagicMock()
|
||||
task.id = mock_context.task_id
|
||||
task.name = "Mock Task"
|
||||
task.description = "Mock task description"
|
||||
return task
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_context() -> MagicMock:
|
||||
"""Create a mock RequestContext."""
|
||||
context = MagicMock(spec=RequestContext)
|
||||
context.task_id = "test-task-123"
|
||||
context.context_id = "test-context-456"
|
||||
context.get_user_input.return_value = "Test user message"
|
||||
context.message = MagicMock(spec=Message)
|
||||
context.message.parts = []
|
||||
context.current_task = None
|
||||
return context
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_event_queue() -> AsyncMock:
|
||||
"""Create a mock EventQueue."""
|
||||
queue = AsyncMock(spec=EventQueue)
|
||||
queue.enqueue_event = AsyncMock()
|
||||
return queue
|
||||
|
||||
|
||||
@pytest_asyncio.fixture(autouse=True)
|
||||
async def clear_cache(mock_context: MagicMock) -> None:
|
||||
"""Clear cancel flag from cache before each test."""
|
||||
from aiocache import caches
|
||||
|
||||
cache = caches.get("default")
|
||||
await cache.delete(f"cancel:{mock_context.task_id}")
|
||||
|
||||
|
||||
class TestCancellableDecorator:
|
||||
"""Tests for the cancellable decorator."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_executes_function_without_context(self) -> None:
|
||||
"""Function executes normally when no RequestContext is provided."""
|
||||
call_count = 0
|
||||
|
||||
@cancellable
|
||||
async def my_func(value: int) -> int:
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
return value * 2
|
||||
|
||||
result = await my_func(5)
|
||||
|
||||
assert result == 10
|
||||
assert call_count == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_executes_function_with_context(
|
||||
self, mock_context: MagicMock
|
||||
) -> None:
|
||||
"""Function executes normally with RequestContext when not cancelled."""
|
||||
|
||||
@cancellable
|
||||
async def my_func(context: RequestContext) -> str:
|
||||
await asyncio.sleep(0.01)
|
||||
return "completed"
|
||||
|
||||
result = await my_func(mock_context)
|
||||
|
||||
assert result == "completed"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cancellation_raises_cancelled_error(
|
||||
self, mock_context: MagicMock
|
||||
) -> None:
|
||||
"""Function raises CancelledError when cancel flag is set."""
|
||||
from aiocache import caches
|
||||
|
||||
cache = caches.get("default")
|
||||
|
||||
@cancellable
|
||||
async def slow_func(context: RequestContext) -> str:
|
||||
await asyncio.sleep(1.0)
|
||||
return "should not reach"
|
||||
|
||||
await cache.set(f"cancel:{mock_context.task_id}", True)
|
||||
|
||||
with pytest.raises(asyncio.CancelledError):
|
||||
await slow_func(mock_context)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_removes_cancel_flag(self, mock_context: MagicMock) -> None:
|
||||
"""Cancel flag is cleaned up after execution."""
|
||||
from aiocache import caches
|
||||
|
||||
cache = caches.get("default")
|
||||
|
||||
@cancellable
|
||||
async def quick_func(context: RequestContext) -> str:
|
||||
return "done"
|
||||
|
||||
await quick_func(mock_context)
|
||||
|
||||
flag = await cache.get(f"cancel:{mock_context.task_id}")
|
||||
assert flag is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_extracts_context_from_kwargs(self, mock_context: MagicMock) -> None:
|
||||
"""Context can be passed as keyword argument."""
|
||||
|
||||
@cancellable
|
||||
async def my_func(value: int, context: RequestContext | None = None) -> int:
|
||||
return value + 1
|
||||
|
||||
result = await my_func(10, context=mock_context)
|
||||
|
||||
assert result == 11
|
||||
|
||||
|
||||
class TestExecute:
|
||||
"""Tests for the execute function."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_successful_execution(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Execute completes successfully and enqueues completed task."""
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus") as mock_bus,
|
||||
):
|
||||
await execute(mock_agent, mock_context, mock_event_queue)
|
||||
|
||||
mock_agent.aexecute_task.assert_called_once()
|
||||
mock_event_queue.enqueue_event.assert_called_once()
|
||||
assert mock_bus.emit.call_count == 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_emits_started_event(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Execute emits A2AServerTaskStartedEvent."""
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus") as mock_bus,
|
||||
):
|
||||
await execute(mock_agent, mock_context, mock_event_queue)
|
||||
|
||||
first_call = mock_bus.emit.call_args_list[0]
|
||||
event = first_call[0][1]
|
||||
|
||||
assert event.type == "a2a_server_task_started"
|
||||
assert event.task_id == mock_context.task_id
|
||||
assert event.context_id == mock_context.context_id
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_emits_completed_event(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Execute emits A2AServerTaskCompletedEvent on success."""
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus") as mock_bus,
|
||||
):
|
||||
await execute(mock_agent, mock_context, mock_event_queue)
|
||||
|
||||
second_call = mock_bus.emit.call_args_list[1]
|
||||
event = second_call[0][1]
|
||||
|
||||
assert event.type == "a2a_server_task_completed"
|
||||
assert event.task_id == mock_context.task_id
|
||||
assert event.result == "Task completed successfully"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_emits_failed_event_on_exception(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Execute emits A2AServerTaskFailedEvent on exception."""
|
||||
mock_agent.aexecute_task = AsyncMock(side_effect=ValueError("Test error"))
|
||||
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus") as mock_bus,
|
||||
):
|
||||
with pytest.raises(Exception):
|
||||
await execute(mock_agent, mock_context, mock_event_queue)
|
||||
|
||||
failed_call = mock_bus.emit.call_args_list[1]
|
||||
event = failed_call[0][1]
|
||||
|
||||
assert event.type == "a2a_server_task_failed"
|
||||
assert "Test error" in event.error
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_emits_canceled_event_on_cancellation(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Execute emits A2AServerTaskCanceledEvent on CancelledError."""
|
||||
mock_agent.aexecute_task = AsyncMock(side_effect=asyncio.CancelledError())
|
||||
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus") as mock_bus,
|
||||
):
|
||||
with pytest.raises(asyncio.CancelledError):
|
||||
await execute(mock_agent, mock_context, mock_event_queue)
|
||||
|
||||
canceled_call = mock_bus.emit.call_args_list[1]
|
||||
event = canceled_call[0][1]
|
||||
|
||||
assert event.type == "a2a_server_task_canceled"
|
||||
assert event.task_id == mock_context.task_id
|
||||
|
||||
|
||||
class TestCancel:
|
||||
"""Tests for the cancel function."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sets_cancel_flag_in_cache(
|
||||
self,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
) -> None:
|
||||
"""Cancel sets the cancel flag in cache."""
|
||||
from aiocache import caches
|
||||
|
||||
cache = caches.get("default")
|
||||
|
||||
await cancel(mock_context, mock_event_queue)
|
||||
|
||||
flag = await cache.get(f"cancel:{mock_context.task_id}")
|
||||
assert flag is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_enqueues_task_status_update_event(
|
||||
self,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
) -> None:
|
||||
"""Cancel enqueues TaskStatusUpdateEvent with canceled state."""
|
||||
await cancel(mock_context, mock_event_queue)
|
||||
|
||||
mock_event_queue.enqueue_event.assert_called_once()
|
||||
event = mock_event_queue.enqueue_event.call_args[0][0]
|
||||
|
||||
assert event.task_id == mock_context.task_id
|
||||
assert event.context_id == mock_context.context_id
|
||||
assert event.status.state == TaskState.canceled
|
||||
assert event.final is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_none_when_no_current_task(
|
||||
self,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
) -> None:
|
||||
"""Cancel returns None when context has no current_task."""
|
||||
mock_context.current_task = None
|
||||
|
||||
result = await cancel(mock_context, mock_event_queue)
|
||||
|
||||
assert result is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_returns_updated_task_when_current_task_exists(
|
||||
self,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
) -> None:
|
||||
"""Cancel returns updated task when context has current_task."""
|
||||
current_task = MagicMock(spec=A2ATask)
|
||||
current_task.status = TaskStatus(state=TaskState.working)
|
||||
mock_context.current_task = current_task
|
||||
|
||||
result = await cancel(mock_context, mock_event_queue)
|
||||
|
||||
assert result is current_task
|
||||
assert result.status.state == TaskState.canceled
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_after_cancel(
|
||||
self,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
) -> None:
|
||||
"""Cancel flag persists for cancellable decorator to detect."""
|
||||
from aiocache import caches
|
||||
|
||||
cache = caches.get("default")
|
||||
|
||||
await cancel(mock_context, mock_event_queue)
|
||||
|
||||
flag = await cache.get(f"cancel:{mock_context.task_id}")
|
||||
assert flag is True
|
||||
|
||||
await cache.delete(f"cancel:{mock_context.task_id}")
|
||||
|
||||
|
||||
class TestExecuteAndCancelIntegration:
|
||||
"""Integration tests for execute and cancel working together."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cancel_stops_running_execute(
|
||||
self,
|
||||
mock_agent: MagicMock,
|
||||
mock_context: MagicMock,
|
||||
mock_event_queue: AsyncMock,
|
||||
mock_task: MagicMock,
|
||||
) -> None:
|
||||
"""Calling cancel stops a running execute."""
|
||||
|
||||
async def slow_task(**kwargs: Any) -> str:
|
||||
await asyncio.sleep(2.0)
|
||||
return "should not complete"
|
||||
|
||||
mock_agent.aexecute_task = slow_task
|
||||
|
||||
with (
|
||||
patch("crewai.a2a.utils.task.Task", return_value=mock_task),
|
||||
patch("crewai.a2a.utils.task.crewai_event_bus"),
|
||||
):
|
||||
execute_task = asyncio.create_task(
|
||||
execute(mock_agent, mock_context, mock_event_queue)
|
||||
)
|
||||
|
||||
await asyncio.sleep(0.1)
|
||||
await cancel(mock_context, mock_event_queue)
|
||||
|
||||
with pytest.raises(asyncio.CancelledError):
|
||||
await execute_task
|
||||
154
lib/crewai-a2a/tests/agents/test_a2a_trust_completion_status.py
Normal file
154
lib/crewai-a2a/tests/agents/test_a2a_trust_completion_status.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Test trust_remote_completion_status flag in A2A wrapper."""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from crewai.a2a.config import A2AConfig
|
||||
import pytest
|
||||
|
||||
|
||||
try:
|
||||
from a2a.types import Message, Role
|
||||
|
||||
A2A_SDK_INSTALLED = True
|
||||
except ImportError:
|
||||
A2A_SDK_INSTALLED = False
|
||||
|
||||
|
||||
def _create_mock_agent_card(name: str = "Test", url: str = "http://test-endpoint.com/"):
|
||||
"""Create a mock agent card with proper model_dump behavior."""
|
||||
mock_card = MagicMock()
|
||||
mock_card.name = name
|
||||
mock_card.url = url
|
||||
mock_card.model_dump.return_value = {"name": name, "url": url}
|
||||
mock_card.model_dump_json.return_value = f'{{"name": "{name}", "url": "{url}"}}'
|
||||
return mock_card
|
||||
|
||||
|
||||
@pytest.mark.skipif(not A2A_SDK_INSTALLED, reason="Requires a2a-sdk to be installed")
|
||||
def test_trust_remote_completion_status_true_returns_directly():
|
||||
"""When trust_remote_completion_status=True and A2A returns completed, return result directly."""
|
||||
from crewai import Agent, Task
|
||||
from crewai.a2a.wrapper import _delegate_to_a2a
|
||||
|
||||
a2a_config = A2AConfig(
|
||||
endpoint="http://test-endpoint.com",
|
||||
trust_remote_completion_status=True,
|
||||
)
|
||||
|
||||
agent = Agent(
|
||||
role="test manager",
|
||||
goal="coordinate",
|
||||
backstory="test",
|
||||
a2a=a2a_config,
|
||||
)
|
||||
|
||||
task = Task(description="test", expected_output="test", agent=agent)
|
||||
|
||||
class MockResponse:
|
||||
is_a2a = True
|
||||
message = "Please help"
|
||||
a2a_ids = ["http://test-endpoint.com/"]
|
||||
|
||||
with (
|
||||
patch("crewai.a2a.wrapper.execute_a2a_delegation") as mock_execute,
|
||||
patch("crewai.a2a.wrapper._fetch_agent_cards_concurrently") as mock_fetch,
|
||||
):
|
||||
mock_card = _create_mock_agent_card()
|
||||
mock_fetch.return_value = ({"http://test-endpoint.com/": mock_card}, {})
|
||||
|
||||
# A2A returns completed
|
||||
mock_execute.return_value = {
|
||||
"status": "completed",
|
||||
"result": "Done by remote",
|
||||
"history": [],
|
||||
}
|
||||
|
||||
# This should return directly without checking LLM response
|
||||
result = _delegate_to_a2a(
|
||||
self=agent,
|
||||
agent_response=MockResponse(),
|
||||
task=task,
|
||||
original_fn=lambda *args, **kwargs: "fallback",
|
||||
context=None,
|
||||
tools=None,
|
||||
agent_cards={"http://test-endpoint.com/": mock_card},
|
||||
original_task_description="test",
|
||||
)
|
||||
|
||||
assert result == "Done by remote"
|
||||
assert mock_execute.call_count == 1
|
||||
|
||||
|
||||
@pytest.mark.skipif(not A2A_SDK_INSTALLED, reason="Requires a2a-sdk to be installed")
|
||||
def test_trust_remote_completion_status_false_continues_conversation():
|
||||
"""When trust_remote_completion_status=False and A2A returns completed, ask server agent."""
|
||||
from crewai import Agent, Task
|
||||
from crewai.a2a.wrapper import _delegate_to_a2a
|
||||
|
||||
a2a_config = A2AConfig(
|
||||
endpoint="http://test-endpoint.com",
|
||||
trust_remote_completion_status=False,
|
||||
)
|
||||
|
||||
agent = Agent(
|
||||
role="test manager",
|
||||
goal="coordinate",
|
||||
backstory="test",
|
||||
a2a=a2a_config,
|
||||
)
|
||||
|
||||
task = Task(description="test", expected_output="test", agent=agent)
|
||||
|
||||
class MockResponse:
|
||||
is_a2a = True
|
||||
message = "Please help"
|
||||
a2a_ids = ["http://test-endpoint.com/"]
|
||||
|
||||
call_count = 0
|
||||
|
||||
def mock_original_fn(self, task, context, tools):
|
||||
nonlocal call_count
|
||||
call_count += 1
|
||||
if call_count == 1:
|
||||
# Server decides to finish
|
||||
return '{"is_a2a": false, "message": "Server final answer", "a2a_ids": []}'
|
||||
return "unexpected"
|
||||
|
||||
with (
|
||||
patch("crewai.a2a.wrapper.execute_a2a_delegation") as mock_execute,
|
||||
patch("crewai.a2a.wrapper._fetch_agent_cards_concurrently") as mock_fetch,
|
||||
):
|
||||
mock_card = _create_mock_agent_card()
|
||||
mock_fetch.return_value = ({"http://test-endpoint.com/": mock_card}, {})
|
||||
|
||||
# A2A returns completed
|
||||
mock_execute.return_value = {
|
||||
"status": "completed",
|
||||
"result": "Done by remote",
|
||||
"history": [],
|
||||
}
|
||||
|
||||
result = _delegate_to_a2a(
|
||||
self=agent,
|
||||
agent_response=MockResponse(),
|
||||
task=task,
|
||||
original_fn=mock_original_fn,
|
||||
context=None,
|
||||
tools=None,
|
||||
agent_cards={"http://test-endpoint.com/": mock_card},
|
||||
original_task_description="test",
|
||||
)
|
||||
|
||||
# Should call original_fn to get server response
|
||||
assert call_count >= 1
|
||||
assert result == "Server final answer"
|
||||
|
||||
|
||||
@pytest.mark.skipif(not A2A_SDK_INSTALLED, reason="Requires a2a-sdk to be installed")
|
||||
def test_default_trust_remote_completion_status_is_false():
|
||||
"""Verify that default value of trust_remote_completion_status is False."""
|
||||
a2a_config = A2AConfig(
|
||||
endpoint="http://test-endpoint.com",
|
||||
)
|
||||
|
||||
assert a2a_config.trust_remote_completion_status is False
|
||||
212
lib/crewai-a2a/tests/agents/test_agent_a2a_kickoff.py
Normal file
212
lib/crewai-a2a/tests/agents/test_agent_a2a_kickoff.py
Normal file
@@ -0,0 +1,212 @@
|
||||
"""Tests for Agent.kickoff() with A2A delegation using VCR cassettes."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from crewai import Agent
|
||||
from crewai.a2a.config import A2AClientConfig
|
||||
import pytest
|
||||
|
||||
|
||||
A2A_TEST_ENDPOINT = os.getenv(
|
||||
"A2A_TEST_ENDPOINT", "http://localhost:9999/.well-known/agent-card.json"
|
||||
)
|
||||
|
||||
|
||||
class TestAgentA2AKickoff:
|
||||
"""Tests for Agent.kickoff() with A2A delegation."""
|
||||
|
||||
@pytest.fixture
|
||||
def researcher_agent(self) -> Agent:
|
||||
"""Create a research agent with A2A configuration."""
|
||||
return Agent(
|
||||
role="Research Analyst",
|
||||
goal="Find and analyze information about AI developments",
|
||||
backstory="Expert researcher with access to remote specialized agents",
|
||||
verbose=True,
|
||||
a2a=[
|
||||
A2AClientConfig(
|
||||
endpoint=A2A_TEST_ENDPOINT,
|
||||
fail_fast=False,
|
||||
max_turns=3, # Limit turns for testing
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
@pytest.mark.skip(reason="VCR cassette matching issue with agent card caching")
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_delegates_to_a2a(self, researcher_agent: Agent) -> None:
|
||||
"""Test that agent.kickoff() delegates to A2A server."""
|
||||
result = researcher_agent.kickoff(
|
||||
"Use the remote A2A agent to find out what the current time is in New York."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert isinstance(result.raw, str)
|
||||
assert len(result.raw) > 0
|
||||
|
||||
@pytest.mark.skip(reason="VCR cassette matching issue with agent card caching")
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_with_calculator_skill(self, researcher_agent: Agent) -> None:
|
||||
"""Test that agent can delegate calculation to A2A server."""
|
||||
result = researcher_agent.kickoff(
|
||||
"Ask the remote A2A agent to calculate 25 times 17."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert "425" in result.raw or "425.0" in result.raw
|
||||
|
||||
@pytest.mark.skip(reason="VCR cassette matching issue with agent card caching")
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_with_conversation_skill(
|
||||
self, researcher_agent: Agent
|
||||
) -> None:
|
||||
"""Test that agent can have a conversation with A2A server."""
|
||||
result = researcher_agent.kickoff(
|
||||
"Delegate to the remote A2A agent to explain quantum computing in simple terms."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert isinstance(result.raw, str)
|
||||
assert len(result.raw) > 50 # Should have a meaningful response
|
||||
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_returns_lite_agent_output(
|
||||
self, researcher_agent: Agent
|
||||
) -> None:
|
||||
"""Test that kickoff returns LiteAgentOutput with correct structure."""
|
||||
from crewai.lite_agent_output import LiteAgentOutput
|
||||
|
||||
result = researcher_agent.kickoff(
|
||||
"Use the A2A agent to tell me what time it is."
|
||||
)
|
||||
|
||||
assert isinstance(result, LiteAgentOutput)
|
||||
assert result.raw is not None
|
||||
assert result.agent_role == "Research Analyst"
|
||||
assert isinstance(result.messages, list)
|
||||
|
||||
@pytest.mark.skip(reason="VCR cassette matching issue with agent card caching")
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_handles_multi_turn_conversation(
|
||||
self, researcher_agent: Agent
|
||||
) -> None:
|
||||
"""Test that agent handles multi-turn A2A conversations."""
|
||||
# This should trigger multiple turns of conversation
|
||||
result = researcher_agent.kickoff(
|
||||
"Ask the remote A2A agent about recent developments in AI agent communication protocols."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
# The response should contain information about A2A or agent protocols
|
||||
assert isinstance(result.raw, str)
|
||||
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_without_a2a_works_normally(self) -> None:
|
||||
"""Test that agent without A2A config works normally."""
|
||||
agent = Agent(
|
||||
role="Simple Assistant",
|
||||
goal="Help with basic tasks",
|
||||
backstory="A helpful assistant",
|
||||
verbose=False,
|
||||
)
|
||||
|
||||
# This should work without A2A delegation
|
||||
result = agent.kickoff("Say hello")
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_with_failed_a2a_endpoint(self) -> None:
|
||||
"""Test that agent handles failed A2A connection gracefully."""
|
||||
agent = Agent(
|
||||
role="Research Analyst",
|
||||
goal="Find information",
|
||||
backstory="Expert researcher",
|
||||
verbose=False,
|
||||
a2a=[
|
||||
A2AClientConfig(
|
||||
endpoint="http://nonexistent:9999/.well-known/agent-card.json",
|
||||
fail_fast=False,
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
# Should fallback to local LLM when A2A fails
|
||||
result = agent.kickoff("What is 2 + 2?")
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
|
||||
@pytest.mark.skip(reason="VCR cassette matching issue with agent card caching")
|
||||
@pytest.mark.vcr()
|
||||
def test_agent_kickoff_with_list_messages(self, researcher_agent: Agent) -> None:
|
||||
"""Test that agent.kickoff() works with list of messages."""
|
||||
messages = [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Delegate to the A2A agent to find the current time in Tokyo.",
|
||||
},
|
||||
]
|
||||
|
||||
result = researcher_agent.kickoff(messages)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert isinstance(result.raw, str)
|
||||
|
||||
|
||||
class TestAgentA2AKickoffAsync:
|
||||
"""Tests for async Agent.kickoff_async() with A2A delegation."""
|
||||
|
||||
@pytest.fixture
|
||||
def researcher_agent(self) -> Agent:
|
||||
"""Create a research agent with A2A configuration."""
|
||||
return Agent(
|
||||
role="Research Analyst",
|
||||
goal="Find and analyze information",
|
||||
backstory="Expert researcher with access to remote agents",
|
||||
verbose=True,
|
||||
a2a=[
|
||||
A2AClientConfig(
|
||||
endpoint=A2A_TEST_ENDPOINT,
|
||||
fail_fast=False,
|
||||
max_turns=3,
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
@pytest.mark.vcr()
|
||||
@pytest.mark.asyncio
|
||||
async def test_agent_kickoff_async_delegates_to_a2a(
|
||||
self, researcher_agent: Agent
|
||||
) -> None:
|
||||
"""Test that agent.kickoff_async() delegates to A2A server."""
|
||||
result = await researcher_agent.kickoff_async(
|
||||
"Use the remote A2A agent to calculate 10 plus 15."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert isinstance(result.raw, str)
|
||||
|
||||
@pytest.mark.skip(reason="Test assertion needs fixing - not capturing final answer")
|
||||
@pytest.mark.vcr()
|
||||
@pytest.mark.asyncio
|
||||
async def test_agent_kickoff_async_with_calculator(
|
||||
self, researcher_agent: Agent
|
||||
) -> None:
|
||||
"""Test async delegation with calculator skill."""
|
||||
result = await researcher_agent.kickoff_async(
|
||||
"Ask the A2A agent to calculate 100 divided by 4."
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
assert result.raw is not None
|
||||
assert "25" in result.raw or "25.0" in result.raw
|
||||
112
lib/crewai-a2a/tests/agents/test_agent_a2a_wrapping.py
Normal file
112
lib/crewai-a2a/tests/agents/test_agent_a2a_wrapping.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""Test A2A wrapper is only applied when a2a is passed to Agent."""
|
||||
|
||||
from crewai import Agent
|
||||
from crewai.a2a.config import A2AConfig
|
||||
import pytest
|
||||
|
||||
|
||||
try:
|
||||
import a2a
|
||||
|
||||
A2A_SDK_INSTALLED = True
|
||||
except ImportError:
|
||||
A2A_SDK_INSTALLED = False
|
||||
|
||||
|
||||
def test_agent_without_a2a_has_no_wrapper():
|
||||
"""Verify that agents without a2a don't get the wrapper applied."""
|
||||
agent = Agent(
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
)
|
||||
|
||||
assert agent.a2a is None
|
||||
assert callable(agent.execute_task)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
True,
|
||||
reason="Requires a2a-sdk to be installed. This test verifies wrapper is applied when a2a is set.",
|
||||
)
|
||||
def test_agent_with_a2a_has_wrapper():
|
||||
"""Verify that agents with a2a get the wrapper applied."""
|
||||
a2a_config = A2AConfig(
|
||||
endpoint="http://test-endpoint.com",
|
||||
)
|
||||
|
||||
agent = Agent(
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
a2a=a2a_config,
|
||||
)
|
||||
|
||||
assert agent.a2a is not None
|
||||
assert agent.a2a.endpoint == "http://test-endpoint.com"
|
||||
assert callable(agent.execute_task)
|
||||
|
||||
|
||||
@pytest.mark.skipif(not A2A_SDK_INSTALLED, reason="Requires a2a-sdk to be installed")
|
||||
def test_agent_with_a2a_creates_successfully():
|
||||
"""Verify that creating an agent with a2a succeeds and applies wrapper."""
|
||||
a2a_config = A2AConfig(
|
||||
endpoint="http://test-endpoint.com",
|
||||
)
|
||||
|
||||
agent = Agent(
|
||||
role="test role",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
a2a=a2a_config,
|
||||
)
|
||||
|
||||
assert agent.a2a is not None
|
||||
assert agent.a2a.endpoint == "http://test-endpoint.com/"
|
||||
assert callable(agent.execute_task)
|
||||
assert hasattr(agent.execute_task, "__wrapped__")
|
||||
|
||||
|
||||
def test_multiple_agents_without_a2a():
|
||||
"""Verify that multiple agents without a2a work correctly."""
|
||||
agent1 = Agent(
|
||||
role="agent 1",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
)
|
||||
|
||||
agent2 = Agent(
|
||||
role="agent 2",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
)
|
||||
|
||||
assert agent1.a2a is None
|
||||
assert agent2.a2a is None
|
||||
assert callable(agent1.execute_task)
|
||||
assert callable(agent2.execute_task)
|
||||
|
||||
|
||||
@pytest.mark.skipif(not A2A_SDK_INSTALLED, reason="Requires a2a-sdk to be installed")
|
||||
def test_wrapper_is_applied_differently_per_instance():
|
||||
"""Verify that agents with and without a2a have different execute_task methods."""
|
||||
agent_without_a2a = Agent(
|
||||
role="agent without a2a",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
)
|
||||
|
||||
a2a_config = A2AConfig(endpoint="http://test-endpoint.com")
|
||||
agent_with_a2a = Agent(
|
||||
role="agent with a2a",
|
||||
goal="test goal",
|
||||
backstory="test backstory",
|
||||
a2a=a2a_config,
|
||||
)
|
||||
|
||||
assert (
|
||||
agent_without_a2a.execute_task.__func__
|
||||
is not agent_with_a2a.execute_task.__func__
|
||||
)
|
||||
assert not hasattr(agent_without_a2a.execute_task, "__wrapped__")
|
||||
assert hasattr(agent_with_a2a.execute_task, "__wrapped__")
|
||||
@@ -0,0 +1,44 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"streaming":true},"defaultInputModes":["text"],"defaultOutputModes":["text"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999/","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1198'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:00 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,126 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"streaming":true},"defaultInputModes":["text"],"defaultOutputModes":["text"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999/","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1198'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:16:58 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"e5ac2160-ae9b-4bf9-aad7-14bf0d53d6d9","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":[],"blocking":true},"message":{"kind":"message","messageId":"e1e63c75-3ea0-49fb-b512-5128a2476416","parts":[{"kind":"text","text":"What
|
||||
is 2 + 2?"}],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '301'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999/
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"e5ac2160-ae9b-4bf9-aad7-14bf0d53d6d9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"b9e14c1b-734d-4d1e-864a-e6dda5231d71\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"0dd4d3af-f35d-409d-9462-01218e5641f9\"}}\r\n\r\ndata:
|
||||
{\"id\":\"e5ac2160-ae9b-4bf9-aad7-14bf0d53d6d9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"b9e14c1b-734d-4d1e-864a-e6dda5231d71\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"0dd4d3af-f35d-409d-9462-01218e5641f9\"}}\r\n\r\ndata:
|
||||
{\"id\":\"e5ac2160-ae9b-4bf9-aad7-14bf0d53d6d9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"b9e14c1b-734d-4d1e-864a-e6dda5231d71\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"54bb7ff3-f2c0-4eb3-b427-bf1c8cf90832\",\"parts\":[{\"kind\":\"text\",\"text\":\"\\n[Tool:
|
||||
calculator] 2 + 2 = 4\\n2 + 2 equals 4.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"0dd4d3af-f35d-409d-9462-01218e5641f9\"}}\r\n\r\n"
|
||||
headers:
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:16:58 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"cb1e4af3-d2d0-4848-96b8-7082ee6171d1","jsonrpc":"2.0","method":"tasks/get","params":{"historyLength":100,"id":"0dd4d3af-f35d-409d-9462-01218e5641f9"}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '157'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999/
|
||||
response:
|
||||
body:
|
||||
string: '{"id":"cb1e4af3-d2d0-4848-96b8-7082ee6171d1","jsonrpc":"2.0","result":{"contextId":"b9e14c1b-734d-4d1e-864a-e6dda5231d71","history":[{"contextId":"b9e14c1b-734d-4d1e-864a-e6dda5231d71","kind":"message","messageId":"e1e63c75-3ea0-49fb-b512-5128a2476416","parts":[{"kind":"text","text":"What
|
||||
is 2 + 2?"}],"role":"user","taskId":"0dd4d3af-f35d-409d-9462-01218e5641f9"}],"id":"0dd4d3af-f35d-409d-9462-01218e5641f9","kind":"task","status":{"message":{"kind":"message","messageId":"54bb7ff3-f2c0-4eb3-b427-bf1c8cf90832","parts":[{"kind":"text","text":"\n[Tool:
|
||||
calculator] 2 + 2 = 4\n2 + 2 equals 4."}],"role":"agent"},"state":"completed"}}}'
|
||||
headers:
|
||||
content-length:
|
||||
- '635'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:00 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,90 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"streaming":true},"defaultInputModes":["text"],"defaultOutputModes":["text"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999/","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1198'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:02 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"8cf25b61-8884-4246-adce-fccb32e176ab","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":[],"blocking":true},"message":{"kind":"message","messageId":"c145297f-7331-4835-adcc-66b51de92a2b","parts":[{"kind":"text","text":"What
|
||||
is 2 + 2?"}],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '301'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999/
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"8cf25b61-8884-4246-adce-fccb32e176ab\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"30601267-ab3b-48ef-afc8-916c37a18651\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"3083d3da-4739-4f4f-a4e8-7c048ea819c1\"}}\r\n\r\ndata:
|
||||
{\"id\":\"8cf25b61-8884-4246-adce-fccb32e176ab\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"30601267-ab3b-48ef-afc8-916c37a18651\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"3083d3da-4739-4f4f-a4e8-7c048ea819c1\"}}\r\n\r\ndata:
|
||||
{\"id\":\"8cf25b61-8884-4246-adce-fccb32e176ab\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"30601267-ab3b-48ef-afc8-916c37a18651\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"25f81e3c-b7e8-48b5-a98a-4066f3637a13\",\"parts\":[{\"kind\":\"text\",\"text\":\"\\n[Tool:
|
||||
calculator] 2 + 2 = 4\\n2 + 2 equals 4.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"3083d3da-4739-4f4f-a4e8-7c048ea819c1\"}}\r\n\r\n"
|
||||
headers:
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:02 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,90 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"streaming":true},"defaultInputModes":["text"],"defaultOutputModes":["text"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999/","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1198'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:00 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"3a17c6bf-8db6-45a6-8535-34c45c0c4936","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":[],"blocking":true},"message":{"kind":"message","messageId":"712558a3-6d92-4591-be8a-9dd8566dde82","parts":[{"kind":"text","text":"What
|
||||
is 2 + 2?"}],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '301'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999/
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"3a17c6bf-8db6-45a6-8535-34c45c0c4936\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ca2fbbc9-761e-45d9-a929-0c68b1f8acbf\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"c6e88db0-36e9-4269-8b9a-ecb6dfdcf6a1\"}}\r\n\r\ndata:
|
||||
{\"id\":\"3a17c6bf-8db6-45a6-8535-34c45c0c4936\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ca2fbbc9-761e-45d9-a929-0c68b1f8acbf\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"c6e88db0-36e9-4269-8b9a-ecb6dfdcf6a1\"}}\r\n\r\ndata:
|
||||
{\"id\":\"3a17c6bf-8db6-45a6-8535-34c45c0c4936\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ca2fbbc9-761e-45d9-a929-0c68b1f8acbf\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"916324aa-fd25-4849-bceb-c4644e2fcbb0\",\"parts\":[{\"kind\":\"text\",\"text\":\"\\n[Tool:
|
||||
calculator] 2 + 2 = 4\\n2 + 2 equals 4.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"c6e88db0-36e9-4269-8b9a-ecb6dfdcf6a1\"}}\r\n\r\n"
|
||||
headers:
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Tue, 06 Jan 2026 14:17:00 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,665 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the remote A2A agent to find out what the current time is in New York.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1385'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPKNZ1miu2xMURPYYcLXdKrlMIh\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808810,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in New York?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
274,\n \"completion_tokens\": 40,\n \"total_tokens\": 314,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:31 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '854'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"869c0693-9e53-40ae-acd0-5823d73c7808","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"5c4fc5ee-97c4-42f9-96a0-f1e1205f0832","parts":[{"kind":"text","text":"What
|
||||
is the current time in New York?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '364'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"869c0693-9e53-40ae-acd0-5823d73c7808\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0aba73f6-87de-4e43-9a5a-7ebd22f590e3\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"0aca3052-dd0b-4f0e-bc0e-a646b0a86de2\"}}\r\n\r\ndata:
|
||||
{\"id\":\"869c0693-9e53-40ae-acd0-5823d73c7808\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0aba73f6-87de-4e43-9a5a-7ebd22f590e3\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"0aca3052-dd0b-4f0e-bc0e-a646b0a86de2\"}}\r\n\r\ndata:
|
||||
{\"id\":\"869c0693-9e53-40ae-acd0-5823d73c7808\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0aba73f6-87de-4e43-9a5a-7ebd22f590e3\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"f70e8fe1-26c9-47e1-9331-1be893e0c5b0\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 16:33:31 EST (America/New_York)\\nThe current time in
|
||||
New York is 4:33 PM EST.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"0aca3052-dd0b-4f0e-bc0e-a646b0a86de2\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:30 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the remote A2A agent to find out what the current time is in New York.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1385'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPN7ipYzYuI3Htoj13LNHyic8RB\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808813,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in New York?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
274,\n \"completion_tokens\": 40,\n \"total_tokens\": 314,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:34 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '660'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"7aac6192-3805-4a2f-b9b1-3e281f723f35","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"0a76a4cc-e497-491e-84e0-2a9d35e106b5","parts":[{"kind":"text","text":"What
|
||||
is the current time in New York?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '364'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"7aac6192-3805-4a2f-b9b1-3e281f723f35\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"7a2ffd0b-8d57-4a06-8e61-614cb6132b76\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"a562b802-b554-4dce-93f1-d3b757ab1e93\"}}\r\n\r\ndata:
|
||||
{\"id\":\"7aac6192-3805-4a2f-b9b1-3e281f723f35\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"7a2ffd0b-8d57-4a06-8e61-614cb6132b76\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"a562b802-b554-4dce-93f1-d3b757ab1e93\"}}\r\n\r\ndata:
|
||||
{\"id\":\"7aac6192-3805-4a2f-b9b1-3e281f723f35\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"7a2ffd0b-8d57-4a06-8e61-614cb6132b76\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"9a5678a8-9fdc-47c5-b7fa-061da1bf98e1\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 16:33:34 EST (America/New_York)\\nThe current time in
|
||||
New York is 4:33 PM EST.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"a562b802-b554-4dce-93f1-d3b757ab1e93\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:33 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the remote A2A agent to find out what the current time is in New York.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1385'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPQoeD1yHZjR5bbWnq9fMdIPMFu\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808816,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current local time in New York?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
274,\n \"completion_tokens\": 41,\n \"total_tokens\": 315,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:37 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '684'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"03e755a9-bf97-4c55-bd2f-fbc23e3385ef","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"4c7c5802-a8b0-40ac-bbf4-4ff1cb0b10f3","parts":[{"kind":"text","text":"What
|
||||
is the current local time in New York?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '370'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"03e755a9-bf97-4c55-bd2f-fbc23e3385ef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"fcef0a1d-ba1d-4703-88a7-0caddb7d8602\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"01abf50f-944a-4814-816c-170e460a542a\"}}\r\n\r\ndata:
|
||||
{\"id\":\"03e755a9-bf97-4c55-bd2f-fbc23e3385ef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"fcef0a1d-ba1d-4703-88a7-0caddb7d8602\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"01abf50f-944a-4814-816c-170e460a542a\"}}\r\n\r\ndata:
|
||||
{\"id\":\"03e755a9-bf97-4c55-bd2f-fbc23e3385ef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"fcef0a1d-ba1d-4703-88a7-0caddb7d8602\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"cc32e320-d067-4006-85ad-c3eb988ee0cc\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 16:33:38 EST (America/New_York)\\nThe current local time
|
||||
in New York is 4:33 PM EST on January 30, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"01abf50f-944a-4814-816c-170e460a542a\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:37 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the remote A2A agent to find out what the current time is in New York.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1385'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPUBwXUl9D9xr19IR5ayWaliTQc\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808820,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in New York?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
274,\n \"completion_tokens\": 40,\n \"total_tokens\": 314,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:41 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '844'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"pushNotifications":true,"streaming":true},"defaultInputModes":["text/plain","application/json"],"defaultOutputModes":["text/plain","application/json"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1272'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:49:04 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,744 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent about recent developments in AI agent communication
|
||||
protocols.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1398'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPVb8R5TRMw6i6NIg1K1QCH37DH\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808821,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Could
|
||||
you provide detailed information on the latest developments and advancements
|
||||
in AI agent communication protocols? I'm particularly interested in new standards,
|
||||
interoperability improvements, and innovative methods adopted recently.\\\",\\\"is_a2a\\\":true}\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 63,\n \"total_tokens\": 334,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:42 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '1076'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"7a3da9c5-2ad7-4334-9cb8-3e45920013f3","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"a3d6ab2f-4e18-4b1c-9a5a-1d326b500dc6","parts":[{"kind":"text","text":"Could
|
||||
you provide detailed information on the latest developments and advancements
|
||||
in AI agent communication protocols? I''m particularly interested in new standards,
|
||||
interoperability improvements, and innovative methods adopted recently."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '564'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"7a3da9c5-2ad7-4334-9cb8-3e45920013f3\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"89db77f5-1399-4c96-827b-2d4bac7f412d\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"4c4fceee-2a1b-4050-a958-56f23e9f5920\"}}\r\n\r\ndata:
|
||||
{\"id\":\"7a3da9c5-2ad7-4334-9cb8-3e45920013f3\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"89db77f5-1399-4c96-827b-2d4bac7f412d\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"4c4fceee-2a1b-4050-a958-56f23e9f5920\"}}\r\n\r\ndata:
|
||||
{\"id\":\"7a3da9c5-2ad7-4334-9cb8-3e45920013f3\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"89db77f5-1399-4c96-827b-2d4bac7f412d\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"b7b4db08-f796-4233-a606-44b65791928b\",\"parts\":[{\"kind\":\"text\",\"text\":\"Recent
|
||||
developments in AI agent communication protocols have focused on enhancing
|
||||
interoperability, establishing new standards, and introducing innovative methods.
|
||||
Here are some key advancements:\\n\\n1. **New Standards**: \\n - **IEEE
|
||||
P7010**: This is a standard under development that aims to establish ethical
|
||||
guidelines for AI systems, particularly in how they communicate and interact
|
||||
with humans and other systems. \\n - **W3C's Vocabulary for AI**: The World
|
||||
Wide Web Consortium (W3C) is working on creating vocabularies and protocols
|
||||
to facilitate clearer communication between AI agents across different platforms.\\n\\n2.
|
||||
**Interoperability Improvements**:\\n - **API Standardization**: Efforts
|
||||
like the OpenAPI Specification (formerly known as Swagger) are being adopted
|
||||
for AI models, allowing better integration and communication between different
|
||||
AI systems and services.\\n - **Interoperable Frameworks**: Frameworks such
|
||||
as the AI Exchange (a protocol for exchanging AI models and data) are evolving
|
||||
to ensure that different AI systems can work together more effectively, enhancing
|
||||
data sharing and joint operations.\\n\\n3. **Innovative Methods**:\\n -
|
||||
**Natural Language Understanding (NLU)**: Advances in NLU technologies have
|
||||
made communication more intuitive. AI agents now better grasp context, intent,
|
||||
and sentiment, allowing for more fluid interactions.\\n - **Multi-agent
|
||||
Systems (MAS)**: Recent research is focusing on improving how multiple AI
|
||||
agents communicate within a shared environment, utilizing protocols that allow
|
||||
for collaborative problem-solving and negotiation.\\n - **Federated Learning**:
|
||||
This method enhances privacy and security in communications by allowing AI
|
||||
models to learn collaboratively across multiple devices or systems without
|
||||
sharing personal data.\\n\\n4. **Decentralized Protocols**: The emergence
|
||||
of decentralized communication protocols, such as those based on blockchain
|
||||
technology, is enabling AI agents to communicate securely and transparently,
|
||||
fostering trust among users and systems.\\n\\n5. **Research Collaborations**:
|
||||
Initiatives like the Partnership on AI have brought together companies, academia,
|
||||
and civil society to explore best practices in communication protocols that
|
||||
prioritize ethical considerations.\\n\\nOverall, these advancements in standards
|
||||
and protocols are aimed at creating a more interconnected and efficient environment
|
||||
for AI agents, encouraging innovation while addressing ethical and interoperability
|
||||
challenges.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"4c4fceee-2a1b-4050-a958-56f23e9f5920\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:42 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent about recent developments in AI agent communication
|
||||
protocols.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1398'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPjIHCyZCk5jLwgIGOvDH1NvaPu\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808835,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Could
|
||||
you provide an overview of the recent developments in AI agent communication
|
||||
protocols? Specifically, updates or breakthroughs in how AI agents communicate
|
||||
and coordinate with each other.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 61,\n \"total_tokens\": 332,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:57 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '1453'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"1fc46982-0250-430d-897b-d12ac939f2ae","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"fdd2c664-4eef-4b15-b62d-1ff72b18faf6","parts":[{"kind":"text","text":"Could
|
||||
you provide an overview of the recent developments in AI agent communication
|
||||
protocols? Specifically, updates or breakthroughs in how AI agents communicate
|
||||
and coordinate with each other."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '520'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"1fc46982-0250-430d-897b-d12ac939f2ae\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"2f12f4df-96b1-41db-9c40-b345b214f107\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"105db7f0-0cc6-43f8-a224-24b6d46c01fb\"}}\r\n\r\ndata:
|
||||
{\"id\":\"1fc46982-0250-430d-897b-d12ac939f2ae\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"2f12f4df-96b1-41db-9c40-b345b214f107\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"105db7f0-0cc6-43f8-a224-24b6d46c01fb\"}}\r\n\r\ndata:
|
||||
{\"id\":\"1fc46982-0250-430d-897b-d12ac939f2ae\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"2f12f4df-96b1-41db-9c40-b345b214f107\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"5a60601d-90f9-452b-8356-9335daa61d6a\",\"parts\":[{\"kind\":\"text\",\"text\":\"Recent
|
||||
developments in AI agent communication protocols have focused on enhancing
|
||||
interoperability, efficiency, and security among AI systems. Key breakthroughs
|
||||
include:\\n\\n1. **Standardization of Protocols**: Efforts such as the IEEE
|
||||
P2870 standard aim to create a unified communication framework that allows
|
||||
different AI agents to interact seamlessly across platforms.\\n\\n2. **Multi-agent
|
||||
Reinforcement Learning (MARL)**: Innovations in MARL enable agents to learn
|
||||
from each other and improve communication strategies, facilitating better
|
||||
coordination in dynamic environments.\\n\\n3. **Natural Language Processing
|
||||
(NLP) Enhancements**: Advancements in NLP have improved how AI agents understand
|
||||
and generate human language, enabling more intuitive communication both with
|
||||
humans and among themselves.\\n\\n4. **Decentralized Communication Frameworks**:
|
||||
Protocols using blockchain technology allow for secure and transparent communication
|
||||
between agents without a central authority, promoting trust and data integrity.\\n\\n5.
|
||||
**Contextual Understanding**: New algorithms enhance AI's ability to maintain
|
||||
context in conversations, allowing agents to communicate more effectively
|
||||
in multi-turn dialogues and complex scenarios.\\n\\n6. **Communication Efficiency**:
|
||||
Research into compressed communication techniques is helping reduce bandwidth
|
||||
usage while maintaining the effectiveness of information exchange between
|
||||
agents. \\n\\nThese advancements are paving the way for more autonomous and
|
||||
collaborative AI systems capable of complex task execution.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"105db7f0-0cc6-43f8-a224-24b6d46c01fb\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:56 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent about recent developments in AI agent communication
|
||||
protocols.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1398'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPus9YgQVHUbVZ4ytiARJMcg7wS\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808846,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Could
|
||||
you please provide information about the recent developments in AI agent communication
|
||||
protocols?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n \"annotations\":
|
||||
[]\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
|
||||
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 271,\n \"completion_tokens\":
|
||||
46,\n \"total_tokens\": 317,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:07 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '903'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"b6af80c8-d1de-4149-9093-bac40b337eba","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"8505e288-5d79-4420-88f4-79d8161cd1b5","parts":[{"kind":"text","text":"Could
|
||||
you please provide information about the recent developments in AI agent communication
|
||||
protocols?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '430'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"b6af80c8-d1de-4149-9093-bac40b337eba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"3fa6f8f9-51c6-417c-8680-732f7de8b0f4\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"d1464f6c-b2b3-4f24-8918-322f152caf74\"}}\r\n\r\ndata:
|
||||
{\"id\":\"b6af80c8-d1de-4149-9093-bac40b337eba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"3fa6f8f9-51c6-417c-8680-732f7de8b0f4\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"d1464f6c-b2b3-4f24-8918-322f152caf74\"}}\r\n\r\ndata:
|
||||
{\"id\":\"b6af80c8-d1de-4149-9093-bac40b337eba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"3fa6f8f9-51c6-417c-8680-732f7de8b0f4\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"bd09b7bc-4295-4933-92b9-b2ceeab38aba\",\"parts\":[{\"kind\":\"text\",\"text\":\"Recent
|
||||
developments in AI agent communication protocols have focused on enhancing
|
||||
interoperability, robustness, and security among different AI systems. Key
|
||||
trends include:\\n\\n1. **Standardization Efforts**: Organizations are working
|
||||
towards creating standardized protocols that allow various AI agents to communicate
|
||||
seamlessly, such as the use of IEEE's initiatives.\\n\\n2. **Interoperability
|
||||
Frameworks**: Frameworks such as Agent Communication Language (ACL) and FIPA
|
||||
standards are being updated to support richer interactions among AI systems.\\n\\n3.
|
||||
**Natural Language Processing (NLP) Improvements**: Advances in NLP are enabling
|
||||
agents to understand and generate human-like responses better, allowing for
|
||||
more natural communication.\\n\\n4. **Context Management**: New protocols
|
||||
are incorporating context-awareness so that agents can understand and adjust
|
||||
their responses based on the situational context in which communication occurs.\\n\\n5.
|
||||
**Security Protocol Enhancements**: With rising concerns over data privacy,
|
||||
new communication protocols are integrating stronger security measures, including
|
||||
encryption and authentication methods to protect the exchange of sensitive
|
||||
information.\\n\\n6. **Decentralized Communication**: There's an ongoing exploration
|
||||
into decentralized protocols using blockchain technology to ensure transparency
|
||||
and trust in agent communications.\\n\\nThese advancements are instrumental
|
||||
in creating a more connected and effective network of AI agents capable of
|
||||
cooperating and collaborating across diverse applications and industries.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"d1464f6c-b2b3-4f24-8918-322f152caf74\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:07 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent about recent developments in AI agent communication
|
||||
protocols.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1398'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQ6ybuZARDZhmEoVvSYKa8jh4xN\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808858,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Can
|
||||
you provide the latest information on recent developments in AI agent communication
|
||||
protocols? Specifically, I am interested in new standards, methods, or tools
|
||||
that have emerged to enhance interoperability and efficiency among AI agents.\\\",\\\"is_a2a\\\":true}\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 70,\n \"total_tokens\": 341,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:19 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '1228'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"pushNotifications":true,"streaming":true},"defaultInputModes":["text/plain","application/json"],"defaultOutputModes":["text/plain","application/json"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1272'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:49:26 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,622 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the A2A agent to tell me what time it is.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1356'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qP8bKsdUDPFNPsCgA6XfTcDS5RQ\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808798,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
time is it currently?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 269,\n \"completion_tokens\":
|
||||
37,\n \"total_tokens\": 306,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:19 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '959'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"bf41cc3b-7d95-4456-af9b-4dc0c1f3837b","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"4ae6375a-3c1f-49fd-8618-6b235be6ea5f","parts":[{"kind":"text","text":"What
|
||||
time is it currently?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '353'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"bf41cc3b-7d95-4456-af9b-4dc0c1f3837b\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5d428854-df47-4326-aa42-2ca126a4ff08\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"59cef95a-ba9d-486e-b9cb-3ce39dc95c08\"}}\r\n\r\ndata:
|
||||
{\"id\":\"bf41cc3b-7d95-4456-af9b-4dc0c1f3837b\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5d428854-df47-4326-aa42-2ca126a4ff08\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"59cef95a-ba9d-486e-b9cb-3ce39dc95c08\"}}\r\n\r\ndata:
|
||||
{\"id\":\"bf41cc3b-7d95-4456-af9b-4dc0c1f3837b\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5d428854-df47-4326-aa42-2ca126a4ff08\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"9d3b99cd-8853-4f78-8a4b-4c82e640a6a8\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 21:33:20 UTC (UTC)\\nThe current time is 21:33:20 UTC
|
||||
on January 30, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"59cef95a-ba9d-486e-b9cb-3ce39dc95c08\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:19 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the A2A agent to tell me what time it is.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1356'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPCXrfeCIRc2rGEuz7HncDWi0mt\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808802,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
time is it currently?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 269,\n \"completion_tokens\":
|
||||
37,\n \"total_tokens\": 306,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:23 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '762'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"5145c746-1044-4724-9c51-cbcb3f338fbc","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"a4f9ea5b-9ccc-48f2-b5ad-517c0137af72","parts":[{"kind":"text","text":"What
|
||||
time is it currently?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '353'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"5145c746-1044-4724-9c51-cbcb3f338fbc\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"23a29826-dfe4-4c2b-bca2-8c3b69baef39\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"3ef1907c-b2b1-4b3b-a9ca-80678639aa6b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"5145c746-1044-4724-9c51-cbcb3f338fbc\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"23a29826-dfe4-4c2b-bca2-8c3b69baef39\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"3ef1907c-b2b1-4b3b-a9ca-80678639aa6b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"5145c746-1044-4724-9c51-cbcb3f338fbc\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"23a29826-dfe4-4c2b-bca2-8c3b69baef39\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"4fd468de-7e34-4e88-9635-2b13c4acca32\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 21:33:24 UTC (UTC)\\nThe current time is 21:33:24 UTC.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"3ef1907c-b2b1-4b3b-a9ca-80678639aa6b\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:23 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the A2A agent to tell me what time it is.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1356'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPFhtpO4QMdnHFH73Fum0rpqCi5\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808805,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 269,\n \"completion_tokens\":
|
||||
37,\n \"total_tokens\": 306,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:26 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '700'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"fa7576f7-dbfb-4605-ad70-a0ecadf0f1ac","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"ff083812-cb3a-42d3-b410-7941106f68ac","parts":[{"kind":"text","text":"What
|
||||
is the current time?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '352'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"fa7576f7-dbfb-4605-ad70-a0ecadf0f1ac\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0eb501cf-ef11-4b46-b17d-639fb1a9aa3a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"a8c1ccbf-6f13-4894-a4d2-d87d0c3b9ffb\"}}\r\n\r\ndata:
|
||||
{\"id\":\"fa7576f7-dbfb-4605-ad70-a0ecadf0f1ac\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0eb501cf-ef11-4b46-b17d-639fb1a9aa3a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"a8c1ccbf-6f13-4894-a4d2-d87d0c3b9ffb\"}}\r\n\r\ndata:
|
||||
{\"id\":\"fa7576f7-dbfb-4605-ad70-a0ecadf0f1ac\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"0eb501cf-ef11-4b46-b17d-639fb1a9aa3a\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"2a5368d0-ac8d-4c7c-b272-a711b96bf277\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-30 21:33:27 UTC (UTC)\\nThe current time is 21:33:27 UTC
|
||||
on January 30, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"a8c1ccbf-6f13-4894-a4d2-d87d0c3b9ffb\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:26 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Use the A2A agent to tell me what time it is.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1356'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qPJqK3Nr9ySPgvb0LGVLS3ZGxGi\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808809,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 269,\n \"completion_tokens\":
|
||||
37,\n \"total_tokens\": 306,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:30 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '877'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,662 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: ''
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
host:
|
||||
- localhost:9999
|
||||
method: GET
|
||||
uri: http://localhost:9999/.well-known/agent-card.json
|
||||
response:
|
||||
body:
|
||||
string: '{"capabilities":{"pushNotifications":true,"streaming":true},"defaultInputModes":["text/plain","application/json"],"defaultOutputModes":["text/plain","application/json"],"description":"An
|
||||
AI assistant powered by OpenAI GPT with calculator and time tools. Ask questions,
|
||||
perform calculations, or get the current time in any timezone.","name":"GPT
|
||||
Assistant","preferredTransport":"JSONRPC","protocolVersion":"0.3.0","skills":[{"description":"Have
|
||||
a general conversation with the AI assistant. Ask questions, get explanations,
|
||||
or just chat.","examples":["Hello, how are you?","Explain quantum computing
|
||||
in simple terms","What can you help me with?"],"id":"conversation","name":"General
|
||||
Conversation","tags":["chat","conversation","general"]},{"description":"Perform
|
||||
mathematical calculations including arithmetic, exponents, and more.","examples":["What
|
||||
is 25 * 17?","Calculate 2^10","What''s (100 + 50) / 3?"],"id":"calculator","name":"Calculator","tags":["math","calculator","arithmetic"]},{"description":"Get
|
||||
the current date and time in any timezone.","examples":["What time is it?","What''s
|
||||
the current time in Tokyo?","What''s today''s date in New York?"],"id":"time","name":"Current
|
||||
Time","tags":["time","date","timezone"]}],"url":"http://localhost:9999","version":"1.0.0"}'
|
||||
headers:
|
||||
content-length:
|
||||
- '1272'
|
||||
content-type:
|
||||
- application/json
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:04 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent to calculate 25 times 17.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1361'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOvSd5OuQsjAaD7aJtOwEXE0l29\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808785,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is 25 times 17?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 270,\n \"completion_tokens\":
|
||||
39,\n \"total_tokens\": 309,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:05 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '697'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"ac9d0ac3-5e35-48c7-a2cd-59e2a66966dd","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"1cccc8dc-7253-4bbd-8e32-bed4004181c8","parts":[{"kind":"text","text":"What
|
||||
is 25 times 17?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '347'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"ac9d0ac3-5e35-48c7-a2cd-59e2a66966dd\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5da04daa-d638-4124-8f65-7a73627679ad\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"837a9eed-eea6-4d12-94e4-545c6869efc6\"}}\r\n\r\ndata:
|
||||
{\"id\":\"ac9d0ac3-5e35-48c7-a2cd-59e2a66966dd\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5da04daa-d638-4124-8f65-7a73627679ad\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"837a9eed-eea6-4d12-94e4-545c6869efc6\"}}\r\n\r\ndata:
|
||||
{\"id\":\"ac9d0ac3-5e35-48c7-a2cd-59e2a66966dd\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"5da04daa-d638-4124-8f65-7a73627679ad\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"d01e8e15-61d1-4b3d-a356-95d3add99291\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 25 * 17 = 425\\n25 times 17 is 425.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"837a9eed-eea6-4d12-94e4-545c6869efc6\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:04 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent to calculate 25 times 17.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1361'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOzCessPAoHkJZsUYJIw48NwJrS\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808789,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
calculate 25 times 17.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 270,\n \"completion_tokens\":
|
||||
38,\n \"total_tokens\": 308,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:10 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '666'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"89e94967-f28d-4639-9c9c-87719483d15d","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"6ef5d359-8604-4520-a539-518e88d75456","parts":[{"kind":"text","text":"Please
|
||||
calculate 25 times 17."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '356'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"89e94967-f28d-4639-9c9c-87719483d15d\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdd17068-acc2-462a-9e9d-bf1a2403a432\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"c96047bb-5ddf-410a-a4bd-03c6bcd4457e\"}}\r\n\r\ndata:
|
||||
{\"id\":\"89e94967-f28d-4639-9c9c-87719483d15d\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdd17068-acc2-462a-9e9d-bf1a2403a432\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"c96047bb-5ddf-410a-a4bd-03c6bcd4457e\"}}\r\n\r\ndata:
|
||||
{\"id\":\"89e94967-f28d-4639-9c9c-87719483d15d\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdd17068-acc2-462a-9e9d-bf1a2403a432\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"4138a4a4-217a-4f99-a6b1-970229a6bedd\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 25 * 17 = 425\\nThe result of 25 times 17 is 425.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"c96047bb-5ddf-410a-a4bd-03c6bcd4457e\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:10 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent to calculate 25 times 17.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1361'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qP2eXgsxYsRuFnwu5tCdE8jZyiI\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808792,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Calculate
|
||||
the product of 25 and 17.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
270,\n \"completion_tokens\": 40,\n \"total_tokens\": 310,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:13 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '977'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"be0426a2-3de6-44ce-b7de-22b5992b5025","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"2974dcee-5712-40df-aa41-4877f36c4749","parts":[{"kind":"text","text":"Calculate
|
||||
the product of 25 and 17."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '362'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"be0426a2-3de6-44ce-b7de-22b5992b5025\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f9311c55-c6ef-413a-818b-d0ebeb8466be\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"fd2d5eee-8bd6-4ead-b96b-917782444d10\"}}\r\n\r\ndata:
|
||||
{\"id\":\"be0426a2-3de6-44ce-b7de-22b5992b5025\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f9311c55-c6ef-413a-818b-d0ebeb8466be\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"fd2d5eee-8bd6-4ead-b96b-917782444d10\"}}\r\n\r\ndata:
|
||||
{\"id\":\"be0426a2-3de6-44ce-b7de-22b5992b5025\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f9311c55-c6ef-413a-818b-d0ebeb8466be\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"a27f8c49-b5d4-4421-b7cf-9b7455273f34\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 25 * 17 = 425\\nThe product of 25 and 17 is 425.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"fd2d5eee-8bd6-4ead-b96b-917782444d10\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:33:13 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Ask the remote A2A agent to calculate 25 times 17.\n\nProvide your complete
|
||||
response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1361'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qP6z5b62nfY2HkvSobGqRaK0JLD\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808796,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Calculate
|
||||
25 times 17.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n \"annotations\":
|
||||
[]\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
|
||||
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 270,\n \"completion_tokens\":
|
||||
37,\n \"total_tokens\": 307,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:17 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '997'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,669 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the remote A2A agent to explain quantum computing in simple
|
||||
terms.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1389'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQLp4DYPeaKABFeQVDKxePblbY7\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808873,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Can
|
||||
you explain quantum computing in simple terms, suitable for someone with no
|
||||
background in the subject?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 50,\n \"total_tokens\": 321,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:34 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '834'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"4f421e0d-c7ee-46ad-8c7b-9ff31404beba","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"4b0f91a5-079c-4efe-ba3d-bee5592c67e5","parts":[{"kind":"text","text":"Can
|
||||
you explain quantum computing in simple terms, suitable for someone with no
|
||||
background in the subject?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '433'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"4f421e0d-c7ee-46ad-8c7b-9ff31404beba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdfd9f23-8f2e-4cea-8839-cd52ce0584a7\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"7c223d0c-f144-4bf8-b8d8-1c37efb04232\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4f421e0d-c7ee-46ad-8c7b-9ff31404beba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdfd9f23-8f2e-4cea-8839-cd52ce0584a7\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"7c223d0c-f144-4bf8-b8d8-1c37efb04232\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4f421e0d-c7ee-46ad-8c7b-9ff31404beba\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cdfd9f23-8f2e-4cea-8839-cd52ce0584a7\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"bdeb2af5-89c3-4368-93bd-f263b3ab40a9\",\"parts\":[{\"kind\":\"text\",\"text\":\"Quantum
|
||||
computing is a new type of computing that uses the principles of quantum mechanics,
|
||||
which is the science that explains how very small particles, like atoms and
|
||||
electrons, behave. \\n\\nIn classical computing, the basic unit of information
|
||||
is a bit, which can be either a 0 or a 1. This is like a light switch that
|
||||
can either be off (0) or on (1). \\n\\nQuantum computing, on the other hand,
|
||||
uses quantum bits or qubits. A qubit can be in a state of 0, 1, or both at
|
||||
the same time due to a phenomenon called superposition. You can think of this
|
||||
as a spinning coin that is both heads and tails while in the air, rather than
|
||||
fixed to one side when it lands. \\n\\nMoreover, qubits can be linked together
|
||||
through another phenomenon called entanglement. This means that the state
|
||||
of one qubit can depend on the state of another, no matter how far apart they
|
||||
are. This allows quantum computers to solve certain problems much more efficiently
|
||||
than classical computers. \\n\\nIn summary, quantum computing harnesses the
|
||||
strange and fascinating behaviors of particles at the quantum level to process
|
||||
information in ways that traditional computers cannot, potentially making
|
||||
them much more powerful for specific tasks, like breaking encryption or simulating
|
||||
complex molecules.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"7c223d0c-f144-4bf8-b8d8-1c37efb04232\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:33 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the remote A2A agent to explain quantum computing in simple
|
||||
terms.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1389'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQWQTMdXD4Ug72rmgl1eWkm40zL\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808884,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
explain quantum computing in simple terms suitable for a general audience.\\\",\\\"is_a2a\\\":true}\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 43,\n \"total_tokens\": 314,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:45 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '714'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"f5ff9e1d-598f-4b68-94ed-e7f2b942c467","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"b4d6f61a-c854-487a-b238-25f03e0a5ef7","parts":[{"kind":"text","text":"Please
|
||||
explain quantum computing in simple terms suitable for a general audience."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '408'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"f5ff9e1d-598f-4b68-94ed-e7f2b942c467\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"93c80ac2-eaef-4eeb-af7a-5aaa15f5b9ad\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"a4288a18-56d4-4270-ad4a-da1efe6cf84b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"f5ff9e1d-598f-4b68-94ed-e7f2b942c467\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"93c80ac2-eaef-4eeb-af7a-5aaa15f5b9ad\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"a4288a18-56d4-4270-ad4a-da1efe6cf84b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"f5ff9e1d-598f-4b68-94ed-e7f2b942c467\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"93c80ac2-eaef-4eeb-af7a-5aaa15f5b9ad\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"c7303248-eebb-48a4-be78-791ac6d11083\",\"parts\":[{\"kind\":\"text\",\"text\":\"Quantum
|
||||
computing is a type of computing that uses the principles of quantum mechanics,
|
||||
which is the science that explains how very small particles, like atoms and
|
||||
photons, behave. Unlike traditional computers that use bits as the smallest
|
||||
unit of data (which can be either 0 or 1), quantum computers use quantum bits
|
||||
or qubits. \\n\\nHere\u2019s a simple breakdown:\\n- **Qubits**: A qubit can
|
||||
be in a state of 0, 1, or both at the same time (thanks to a property called
|
||||
superposition). This allows quantum computers to process a vast amount of
|
||||
possibilities simultaneously.\\n- **Entanglement**: Qubits can also be entangled,
|
||||
meaning the state of one qubit can depend on the state of another, no matter
|
||||
how far apart they are. This creates a powerful link that can improve computing
|
||||
efficiency.\\n- **Quantum Speedup**: Because of superposition and entanglement,
|
||||
quantum computers can solve certain complex problems much faster than traditional
|
||||
computers.\\n\\nIn simple terms, you can think of quantum computing as a new
|
||||
way of doing math and problem-solving that can tackle really difficult tasks
|
||||
much faster than the computers we use today. However, quantum computing is
|
||||
still in its early stages and is not yet widely used for everyday tasks.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"a4288a18-56d4-4270-ad4a-da1efe6cf84b\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:44 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the remote A2A agent to explain quantum computing in simple
|
||||
terms.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1389'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQhcQzT9KEetchwHOgQTpBYRw7w\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808895,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
explain quantum computing in simple terms suitable for someone without a technical
|
||||
background.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n \"annotations\":
|
||||
[]\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
|
||||
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 271,\n \"completion_tokens\":
|
||||
45,\n \"total_tokens\": 316,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:55 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '786'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"10814663-4e33-4777-ac5d-ea9098a690e9","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"a112c8d7-8cf8-47bc-b84d-a7c8d34326d8","parts":[{"kind":"text","text":"Please
|
||||
explain quantum computing in simple terms suitable for someone without a technical
|
||||
background."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '428'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"10814663-4e33-4777-ac5d-ea9098a690e9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"754dce7b-b2d5-426e-aaf8-3b5aa886984a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"2745bfc5-138f-4773-ad96-98d463fcbb3e\"}}\r\n\r\ndata:
|
||||
{\"id\":\"10814663-4e33-4777-ac5d-ea9098a690e9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"754dce7b-b2d5-426e-aaf8-3b5aa886984a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"2745bfc5-138f-4773-ad96-98d463fcbb3e\"}}\r\n\r\ndata:
|
||||
{\"id\":\"10814663-4e33-4777-ac5d-ea9098a690e9\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"754dce7b-b2d5-426e-aaf8-3b5aa886984a\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"5cfcafcb-5770-4c5a-9724-54dd475a12a1\",\"parts\":[{\"kind\":\"text\",\"text\":\"Quantum
|
||||
computing is a type of computing that uses the principles of quantum mechanics,
|
||||
which is the science that explains how very small particles, like atoms and
|
||||
photons, behave. \\n\\nIn traditional computers, information is processed
|
||||
using bits, which can be either 0 or 1. Think of bits like light switches
|
||||
that can be turned off (0) or on (1). \\n\\nQuantum computers, on the other
|
||||
hand, use quantum bits, or qubits. A qubit can be 0, 1, or both at the same
|
||||
time due to a property called superposition. This means that quantum computers
|
||||
can process a vast amount of information at once compared to regular computers.
|
||||
\\n\\nAnother important concept in quantum computing is entanglement, where
|
||||
qubits become linked in such a way that the state of one qubit can depend
|
||||
on the state of another, no matter how far apart they are. This can lead to
|
||||
faster processing speeds and the ability to solve complex problems more efficiently.
|
||||
\\n\\nIn summary, quantum computing is like a supercharged version of traditional
|
||||
computing that can perform many calculations simultaneously by taking advantage
|
||||
of the quirks of quantum physics.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"2745bfc5-138f-4773-ad96-98d463fcbb3e\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:54 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the remote A2A agent to explain quantum computing in simple
|
||||
terms.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1389'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQqDYsO5v1fvanhv4G5kXB0YHwW\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808904,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
explain quantum computing in simple terms.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
271,\n \"completion_tokens\": 38,\n \"total_tokens\": 309,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:35:05 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '918'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,216 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher\nYour personal goal is: Find information"},{"role":"user","content":"\nCurrent
|
||||
Task: What is 2 + 2?\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '246'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiGEDTbwLcrRjnvxgSpt9XISVwN\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924744,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"The sum of 2 + 2 is 4.\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
43,\n \"completion_tokens\": 12,\n \"total_tokens\": 55,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:25 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '988'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher\nYour personal goal is: Find information"},{"role":"user","content":"\nCurrent
|
||||
Task: What is 2 + 2?\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '246'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiHquzE7A8dBalX3phbPaOSXEnQ\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924745,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"The sum of 2 + 2 is 4.\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
43,\n \"completion_tokens\": 12,\n \"total_tokens\": 55,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:26 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '415'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,623 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the A2A agent to find the current time in Tokyo.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1371'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQ7mKtA8Q7mQ8dWafrjQAtMBV7G\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808859,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in Tokyo?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
270,\n \"completion_tokens\": 39,\n \"total_tokens\": 309,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:20 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '1100'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"ef23c1ef-ef7e-4422-8fd0-330f074e5de8","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"f5952732-b12e-40f9-a9e9-9779501d6467","parts":[{"kind":"text","text":"What
|
||||
is the current time in Tokyo?"}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '361'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"ef23c1ef-ef7e-4422-8fd0-330f074e5de8\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"913b5bd8-c755-4e0c-bbd0-fcf11fcdf257\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"eb05446a-6d96-4964-8e94-3f4ffcc612a7\"}}\r\n\r\ndata:
|
||||
{\"id\":\"ef23c1ef-ef7e-4422-8fd0-330f074e5de8\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"913b5bd8-c755-4e0c-bbd0-fcf11fcdf257\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"eb05446a-6d96-4964-8e94-3f4ffcc612a7\"}}\r\n\r\ndata:
|
||||
{\"id\":\"ef23c1ef-ef7e-4422-8fd0-330f074e5de8\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"913b5bd8-c755-4e0c-bbd0-fcf11fcdf257\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"41813b78-0e9a-41a4-bb6c-7176f9d4e5b2\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-31 06:34:21 JST (Asia/Tokyo)\\nThe current time in Tokyo
|
||||
is 06:34:21 JST on January 31, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"eb05446a-6d96-4964-8e94-3f4ffcc612a7\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:20 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the A2A agent to find the current time in Tokyo.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1371'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQBrUkvfrIi9SeEtGbWxFJjum3o\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808863,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
find the current time in Tokyo and provide it.\\\",\\\"is_a2a\\\":true}\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
270,\n \"completion_tokens\": 41,\n \"total_tokens\": 311,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:24 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '966'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"dc04364b-9ad9-4ba4-81a8-bae8a71bee8c","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"781be5cc-2c77-4c24-a0af-c9d30609ecd7","parts":[{"kind":"text","text":"Please
|
||||
find the current time in Tokyo and provide it."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '380'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"dc04364b-9ad9-4ba4-81a8-bae8a71bee8c\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ae6fa54d-f6b0-4c15-b8b0-eb310731bc1a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"0bda253d-b860-4bd4-9129-691b886e4f8b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"dc04364b-9ad9-4ba4-81a8-bae8a71bee8c\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ae6fa54d-f6b0-4c15-b8b0-eb310731bc1a\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"0bda253d-b860-4bd4-9129-691b886e4f8b\"}}\r\n\r\ndata:
|
||||
{\"id\":\"dc04364b-9ad9-4ba4-81a8-bae8a71bee8c\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"ae6fa54d-f6b0-4c15-b8b0-eb310731bc1a\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"82eec93e-3c91-44c8-9acb-343454eb8fb8\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-31 06:34:25 JST (Asia/Tokyo)\\nThe current time in Tokyo
|
||||
is 06:34 AM on January 31, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"0bda253d-b860-4bd4-9129-691b886e4f8b\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:24 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the A2A agent to find the current time in Tokyo.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1371'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQGaNP21E5XkyFMtOtKogiqwKnN\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808868,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in Tokyo? Please provide the current local time there.\\\",\\\"is_a2a\\\":true}\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
270,\n \"completion_tokens\": 46,\n \"total_tokens\": 316,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:29 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '786'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"06a22a9f-0969-414f-a17b-e3e9d4a3bbef","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"c2ab08c5-ffb1-43e9-9f43-6e8adbaba95b","parts":[{"kind":"text","text":"What
|
||||
is the current time in Tokyo? Please provide the current local time there."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '406'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"06a22a9f-0969-414f-a17b-e3e9d4a3bbef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f86582e4-7d3f-448e-ab51-3522667da9a0\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"5b92d2cb-100e-4be5-ba4d-39b320c3dd4f\"}}\r\n\r\ndata:
|
||||
{\"id\":\"06a22a9f-0969-414f-a17b-e3e9d4a3bbef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f86582e4-7d3f-448e-ab51-3522667da9a0\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"5b92d2cb-100e-4be5-ba4d-39b320c3dd4f\"}}\r\n\r\ndata:
|
||||
{\"id\":\"06a22a9f-0969-414f-a17b-e3e9d4a3bbef\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"f86582e4-7d3f-448e-ab51-3522667da9a0\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"befc6d8b-3618-410b-910d-614dc6de77fd\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
get_time] 2026-01-31 06:34:30 JST (Asia/Tokyo)\\nThe current local time in
|
||||
Tokyo is 06:34 AM on January 31, 2026.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"5b92d2cb-100e-4be5-ba4d-39b320c3dd4f\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:34:28 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote specialized agents\nYour personal goal is:
|
||||
Find and analyze information about AI developments"},{"role":"user","content":"\nCurrent
|
||||
Task: Delegate to the A2A agent to find the current time in Tokyo.\n\nProvide
|
||||
your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1371'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qQKvrKehCGN5jjCKyRqyN6g88gg\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808872,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"What
|
||||
is the current time in Tokyo?\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
270,\n \"completion_tokens\": 39,\n \"total_tokens\": 309,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:34:32 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '742'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,216 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Simple Assistant. A helpful
|
||||
assistant\nYour personal goal is: Help with basic tasks"},{"role":"user","content":"\nCurrent
|
||||
Task: Say hello\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '248'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiFd3X8iE0Xk2N1S3L2k798qWFq\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924743,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"Hello! How can I assist you today?\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
41,\n \"completion_tokens\": 9,\n \"total_tokens\": 50,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:23 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '346'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Simple Assistant. A helpful
|
||||
assistant\nYour personal goal is: Help with basic tasks"},{"role":"user","content":"\nCurrent
|
||||
Task: Say hello\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '248'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiFOaYAAKsuxLAXe6PwTk5AjYdk\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924743,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"Hello! How can I assist you today?\",\n
|
||||
\ \"refusal\": null,\n \"annotations\": []\n },\n \"logprobs\":
|
||||
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
41,\n \"completion_tokens\": 9,\n \"total_tokens\": 50,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:24 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '618'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,220 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Use the remote A2A agent
|
||||
to calculate 10 plus 15.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '322'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiD3djMj91vXlZgRexuoagt4YjK\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924741,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"I am using the remote A2A agent to
|
||||
calculate 10 plus 15.\\n\\nCalculation result: 10 + 15 = 25\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
57,\n \"completion_tokens\": 28,\n \"total_tokens\": 85,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:21 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '633'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Use the remote A2A agent
|
||||
to calculate 10 plus 15.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini"}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '322'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.13.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D8WiEa5fOdnyGxf1o0YYZRjEVstUX\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1770924742,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"Using the remote A2A agent to calculate
|
||||
10 plus 15:\\n\\n10 + 15 = 25\\n\\nThe result is 25.\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
57,\n \"completion_tokens\": 29,\n \"total_tokens\": 86,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_75546bd1a7\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 12 Feb 2026 19:32:22 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '581'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
set-cookie:
|
||||
- SET-COOKIE-XXX
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
@@ -0,0 +1,616 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Ask the A2A agent to
|
||||
calculate 100 divided by 4.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1325'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOhyHELb5GreUumlAiVahTNIN2R\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808771,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Calculate
|
||||
100 divided by 4, please.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
266,\n \"completion_tokens\": 40,\n \"total_tokens\": 306,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:32:52 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- SET-COOKIE-XXX
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '685'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"4d72e53a-2c40-42cb-b74a-404a5a798ba6","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"fa5e002d-f81b-4b61-84e6-27f40d0e0240","parts":[{"kind":"text","text":"Calculate
|
||||
100 divided by 4, please."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '362'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"4d72e53a-2c40-42cb-b74a-404a5a798ba6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"91d26e2b-6c66-45ce-9356-74a0eb634c28\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"151869b9-f640-454a-865d-405413a0859d\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4d72e53a-2c40-42cb-b74a-404a5a798ba6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"91d26e2b-6c66-45ce-9356-74a0eb634c28\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"151869b9-f640-454a-865d-405413a0859d\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4d72e53a-2c40-42cb-b74a-404a5a798ba6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"91d26e2b-6c66-45ce-9356-74a0eb634c28\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"3bf24e8a-6a3b-45f1-82eb-7a283a89e0ac\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 100 / 4 = 25.0\\nThe result of 100 divided by 4 is 25.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"151869b9-f640-454a-865d-405413a0859d\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:32:51 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Ask the A2A agent to
|
||||
calculate 100 divided by 4.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1325'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOmCdZD7rL5Q1syh0ag6AuH5bw3\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808776,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Calculate
|
||||
100 divided by 4.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 266,\n \"completion_tokens\":
|
||||
38,\n \"total_tokens\": 304,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:32:57 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '680'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"4ea0d213-a2cd-4d10-9b8c-034cfaa1d678","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"22c5127f-e6d8-4aae-852c-d2d131474e38","parts":[{"kind":"text","text":"Calculate
|
||||
100 divided by 4."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '354'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"4ea0d213-a2cd-4d10-9b8c-034cfaa1d678\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cba0ab67-9cc6-4afc-a15a-009b0abe4a1c\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"50c44a89-d6bd-4272-92d0-9aef38b35c93\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4ea0d213-a2cd-4d10-9b8c-034cfaa1d678\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cba0ab67-9cc6-4afc-a15a-009b0abe4a1c\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"50c44a89-d6bd-4272-92d0-9aef38b35c93\"}}\r\n\r\ndata:
|
||||
{\"id\":\"4ea0d213-a2cd-4d10-9b8c-034cfaa1d678\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"cba0ab67-9cc6-4afc-a15a-009b0abe4a1c\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"1d96bb0a-a3b7-4217-a1bc-bdb2658f14b7\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 100 / 4 = 25.0\\n100 divided by 4 is 25.0.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"50c44a89-d6bd-4272-92d0-9aef38b35c93\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:32:56 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Ask the A2A agent to
|
||||
calculate 100 divided by 4.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1325'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOqMBxYghf1iunoo7hIo23Mmyw0\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808780,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Calculate
|
||||
100 divided by 4.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\": null,\n
|
||||
\ \"annotations\": []\n },\n \"logprobs\": null,\n \"finish_reason\":
|
||||
\"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 266,\n \"completion_tokens\":
|
||||
38,\n \"total_tokens\": 304,\n \"prompt_tokens_details\": {\n \"cached_tokens\":
|
||||
0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:00 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '572'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"id":"217af3be-d6a6-48df-9460-f254481f7da6","jsonrpc":"2.0","method":"message/stream","params":{"configuration":{"acceptedOutputModes":["application/json"],"blocking":true},"message":{"kind":"message","messageId":"2886d1ee-0fc0-4143-98d8-e7a75ade6895","parts":[{"kind":"text","text":"Calculate
|
||||
100 divided by 4."}],"referenceTaskIds":[],"role":"user"}}}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- '*/*, text/event-stream'
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '354'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- localhost:9999
|
||||
method: POST
|
||||
uri: http://localhost:9999
|
||||
response:
|
||||
body:
|
||||
string: "data: {\"id\":\"217af3be-d6a6-48df-9460-f254481f7da6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"a3254fcf-baf7-4f46-9767-60156d837a6e\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"submitted\"},\"taskId\":\"2c0e3b76-60f7-4636-934a-2ec41af75ead\"}}\r\n\r\ndata:
|
||||
{\"id\":\"217af3be-d6a6-48df-9460-f254481f7da6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"a3254fcf-baf7-4f46-9767-60156d837a6e\",\"final\":false,\"kind\":\"status-update\",\"status\":{\"state\":\"working\"},\"taskId\":\"2c0e3b76-60f7-4636-934a-2ec41af75ead\"}}\r\n\r\ndata:
|
||||
{\"id\":\"217af3be-d6a6-48df-9460-f254481f7da6\",\"jsonrpc\":\"2.0\",\"result\":{\"contextId\":\"a3254fcf-baf7-4f46-9767-60156d837a6e\",\"final\":true,\"kind\":\"status-update\",\"status\":{\"message\":{\"kind\":\"message\",\"messageId\":\"b32f6b9c-70b5-4152-9df3-19436e3b655d\",\"parts\":[{\"kind\":\"text\",\"text\":\"[Tool:
|
||||
calculator] 100 / 4 = 25.0\\n100 divided by 4 equals 25.0.\"}],\"role\":\"agent\"},\"state\":\"completed\"},\"taskId\":\"2c0e3b76-60f7-4636-934a-2ec41af75ead\"}}\r\n\r\n"
|
||||
headers:
|
||||
cache-control:
|
||||
- no-store
|
||||
connection:
|
||||
- keep-alive
|
||||
content-type:
|
||||
- text/event-stream; charset=utf-8
|
||||
date:
|
||||
- Fri, 30 Jan 2026 21:32:59 GMT
|
||||
server:
|
||||
- uvicorn
|
||||
transfer-encoding:
|
||||
- chunked
|
||||
x-accel-buffering:
|
||||
- 'no'
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages":[{"role":"system","content":"You are Research Analyst. Expert
|
||||
researcher with access to remote agents\nYour personal goal is: Find and analyze
|
||||
information"},{"role":"user","content":"\nCurrent Task: Ask the A2A agent to
|
||||
calculate 100 divided by 4.\n\nProvide your complete response:"}],"model":"gpt-4.1-mini","response_format":{"type":"json_schema","json_schema":{"schema":{"properties":{"a2a_ids":{"description":"A2A
|
||||
agent IDs to delegate to.","items":{"const":"http://localhost:9999/.well-known/agent-card.json","type":"string"},"maxItems":1,"title":"A2A
|
||||
Ids","type":"array"},"message":{"description":"The message content. If is_a2a=true,
|
||||
this is sent to the A2A agent. If is_a2a=false, this is your final answer ending
|
||||
the conversation.","title":"Message","type":"string"},"is_a2a":{"description":"Set
|
||||
to false when the remote agent has answered your question - extract their answer
|
||||
and return it as your final message. Set to true ONLY if you need to ask a NEW,
|
||||
DIFFERENT question. NEVER repeat the same request - if the conversation history
|
||||
shows the agent already answered, set is_a2a=false immediately.","title":"Is
|
||||
A2A","type":"boolean"}},"required":["a2a_ids","message","is_a2a"],"title":"AgentResponse","type":"object","additionalProperties":false},"name":"AgentResponse","strict":true}},"stream":false}'
|
||||
headers:
|
||||
User-Agent:
|
||||
- X-USER-AGENT-XXX
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- ACCEPT-ENCODING-XXX
|
||||
authorization:
|
||||
- AUTHORIZATION-XXX
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1325'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- COOKIE-XXX
|
||||
host:
|
||||
- api.openai.com
|
||||
x-stainless-arch:
|
||||
- X-STAINLESS-ARCH-XXX
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-helper-method:
|
||||
- beta.chat.completions.parse
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- X-STAINLESS-OS-XXX
|
||||
x-stainless-package-version:
|
||||
- 1.83.0
|
||||
x-stainless-read-timeout:
|
||||
- X-STAINLESS-READ-TIMEOUT-XXX
|
||||
x-stainless-retry-count:
|
||||
- '0'
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.10
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: "{\n \"id\": \"chatcmpl-D3qOu8lITQt6WBCLcm6bvduIC2xP0\",\n \"object\":
|
||||
\"chat.completion\",\n \"created\": 1769808784,\n \"model\": \"gpt-4.1-mini-2025-04-14\",\n
|
||||
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
|
||||
\"assistant\",\n \"content\": \"{\\\"a2a_ids\\\":[\\\"http://localhost:9999/.well-known/agent-card.json\\\"],\\\"message\\\":\\\"Please
|
||||
calculate 100 divided by 4.\\\",\\\"is_a2a\\\":true}\",\n \"refusal\":
|
||||
null,\n \"annotations\": []\n },\n \"logprobs\": null,\n
|
||||
\ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
|
||||
266,\n \"completion_tokens\": 39,\n \"total_tokens\": 305,\n \"prompt_tokens_details\":
|
||||
{\n \"cached_tokens\": 0,\n \"audio_tokens\": 0\n },\n \"completion_tokens_details\":
|
||||
{\n \"reasoning_tokens\": 0,\n \"audio_tokens\": 0,\n \"accepted_prediction_tokens\":
|
||||
0,\n \"rejected_prediction_tokens\": 0\n }\n },\n \"service_tier\":
|
||||
\"default\",\n \"system_fingerprint\": \"fp_e01c6f58e1\"\n}\n"
|
||||
headers:
|
||||
CF-RAY:
|
||||
- CF-RAY-XXX
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Fri, 30 Jan 2026 21:33:04 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Strict-Transport-Security:
|
||||
- STS-XXX
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
X-Content-Type-Options:
|
||||
- X-CONTENT-TYPE-XXX
|
||||
access-control-expose-headers:
|
||||
- ACCESS-CONTROL-XXX
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
cf-cache-status:
|
||||
- DYNAMIC
|
||||
openai-organization:
|
||||
- OPENAI-ORG-XXX
|
||||
openai-processing-ms:
|
||||
- '934'
|
||||
openai-project:
|
||||
- OPENAI-PROJECT-XXX
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
x-openai-proxy-wasm:
|
||||
- v0.1
|
||||
x-ratelimit-limit-requests:
|
||||
- X-RATELIMIT-LIMIT-REQUESTS-XXX
|
||||
x-ratelimit-limit-tokens:
|
||||
- X-RATELIMIT-LIMIT-TOKENS-XXX
|
||||
x-ratelimit-remaining-requests:
|
||||
- X-RATELIMIT-REMAINING-REQUESTS-XXX
|
||||
x-ratelimit-remaining-tokens:
|
||||
- X-RATELIMIT-REMAINING-TOKENS-XXX
|
||||
x-ratelimit-reset-requests:
|
||||
- X-RATELIMIT-RESET-REQUESTS-XXX
|
||||
x-ratelimit-reset-tokens:
|
||||
- X-RATELIMIT-RESET-TOKENS-XXX
|
||||
x-request-id:
|
||||
- X-REQUEST-ID-XXX
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
version: 1
|
||||
Reference in New Issue
Block a user