Skip to content

Commit b5ae02a

Browse files
Add async parity coverage for string-subclass tool response fields
Co-authored-by: Shri Sukhani <shrisukhani@users.noreply.github.com>
1 parent 549ea4a commit b5ae02a

File tree

1 file changed

+73
-0
lines changed

tests/test_tools_response_handling.py

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1128,6 +1128,28 @@ async def run() -> None:
11281128
asyncio.run(run())
11291129

11301130

1131+
def test_async_scrape_tool_rejects_broken_string_subclass_markdown_field_values():
    """A str subclass whose iteration raises must be rejected as a markdown value."""

    class _ExplodingMarkdown(str):
        # Mimics a hostile str subclass: any attempt to iterate it blows up.
        def __iter__(self):
            raise RuntimeError("markdown iteration exploded")

    async def _exercise() -> None:
        payload = SimpleNamespace(markdown=_ExplodingMarkdown("page"))
        client = _AsyncScrapeClient(_Response(data=payload))
        expected = "scrape tool response field 'markdown' must be a UTF-8 string"
        with pytest.raises(HyperbrowserError, match=expected) as exc_info:
            await WebsiteScrapeTool.async_runnable(
                client,
                {"url": "https://example.com"},
            )
        # Validation raises directly; no underlying cause should be attached.
        assert exc_info.value.original_error is None

    asyncio.run(_exercise())
1151+
1152+
11311153
def test_async_crawl_tool_rejects_non_list_response_data():
11321154
async def run() -> None:
11331155
client = _AsyncCrawlClient(_Response(data={"invalid": "payload"}))
@@ -1186,6 +1208,57 @@ async def run() -> None:
11861208
asyncio.run(run())
11871209

11881210

1211+
def test_async_crawl_tool_rejects_broken_string_subclass_page_url_values():
    """A crawl page whose url is a broken str subclass yields an index-tagged error."""

    class _ExplodingUrl(str):
        # Mimics a hostile str subclass: any attempt to iterate it blows up.
        def __iter__(self):
            raise RuntimeError("url iteration exploded")

    async def _exercise() -> None:
        page = SimpleNamespace(
            url=_ExplodingUrl("https://example.com"),
            markdown="body",
        )
        client = _AsyncCrawlClient(_Response(data=[page]))
        expected = "crawl tool page field 'url' must be a UTF-8 string at index 0"
        with pytest.raises(HyperbrowserError, match=expected) as exc_info:
            await WebsiteCrawlTool.async_runnable(
                client, {"url": "https://example.com"}
            )
        # Validation raises directly; no underlying cause should be attached.
        assert exc_info.value.original_error is None

    asyncio.run(_exercise())
1237+
1238+
1239+
def test_async_browser_use_tool_rejects_broken_string_subclass_final_result_values():
    """A str subclass whose iteration raises must be rejected as a final_result value."""

    class _ExplodingFinalResult(str):
        # Mimics a hostile str subclass: any attempt to iterate it blows up.
        def __iter__(self):
            raise RuntimeError("final_result iteration exploded")

    async def _exercise() -> None:
        result = SimpleNamespace(final_result=_ExplodingFinalResult("done"))
        client = _AsyncBrowserUseClient(_Response(data=result))
        expected = (
            "browser-use tool response field 'final_result' must be a UTF-8 string"
        )
        with pytest.raises(HyperbrowserError, match=expected) as exc_info:
            await BrowserUseTool.async_runnable(client, {"task": "search docs"})
        # Validation raises directly; no underlying cause should be attached.
        assert exc_info.value.original_error is None

    asyncio.run(_exercise())
1260+
1261+
11891262
def test_async_screenshot_tool_decodes_utf8_bytes_field():
11901263
async def run() -> None:
11911264
client = _AsyncScrapeClient(

0 commit comments

Comments (0)