Skip to content

Commit 39192a0

Browse files
Add async parity coverage for response-data mapping boundaries
Co-authored-by: Shri Sukhani <shrisukhani@users.noreply.github.com>
1 parent 6b9bb10 commit 39192a0

File tree

1 file changed

+118
-0
lines changed

1 file changed

+118
-0
lines changed

tests/test_tools_response_handling.py

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -948,6 +948,124 @@ async def run() -> None:
948948
asyncio.run(run())
949949

950950

951+
def test_async_scrape_tool_wraps_mapping_response_data_read_failures():
    """A non-Hyperbrowser error raised while reading the mapping's "data"
    value should be wrapped in a HyperbrowserError with the original error
    attached."""

    class _ExplodingDataMapping(Mapping[str, object]):
        # Advertises a single "data" key, but reading the value raises.
        def __len__(self) -> int:
            return 1

        def __iter__(self):
            yield "data"

        def __contains__(self, key: object) -> bool:
            return key == "data"

        def __getitem__(self, key: str) -> object:
            raise RuntimeError("cannot read response data")

    async def run() -> None:
        client = _AsyncScrapeClient(_ExplodingDataMapping())  # type: ignore[arg-type]
        with pytest.raises(
            HyperbrowserError, match="Failed to read scrape tool response data"
        ) as exc_info:
            await WebsiteScrapeTool.async_runnable(client, {"url": "https://example.com"})
        # The wrapping error must carry the underlying RuntimeError.
        assert exc_info.value.original_error is not None

    asyncio.run(run())
978+
979+
980+
def test_async_scrape_tool_wraps_mapping_response_data_inspection_failures():
    """A non-Hyperbrowser error raised by ``__contains__`` while inspecting
    the response mapping should be wrapped in a HyperbrowserError."""

    class _ContainsRaisesMapping(Mapping[str, object]):
        # Membership checks explode even though the value itself is fine.
        def __len__(self) -> int:
            return 1

        def __iter__(self):
            yield "data"

        def __contains__(self, key: object) -> bool:
            raise RuntimeError("cannot inspect response")

        def __getitem__(self, key: str) -> object:
            return {"markdown": "ok"}

    async def run() -> None:
        client = _AsyncScrapeClient(_ContainsRaisesMapping())  # type: ignore[arg-type]
        with pytest.raises(
            HyperbrowserError, match="Failed to inspect scrape tool response data field"
        ) as exc_info:
            await WebsiteScrapeTool.async_runnable(client, {"url": "https://example.com"})
        # The wrapping error must carry the underlying RuntimeError.
        assert exc_info.value.original_error is not None

    asyncio.run(run())
1008+
1009+
1010+
def test_async_scrape_tool_preserves_hyperbrowser_mapping_data_inspection_failures():
    """A HyperbrowserError raised by ``__contains__`` during response
    inspection must propagate unchanged rather than being re-wrapped."""

    class _ContainsRaisesHyperbrowser(Mapping[str, object]):
        # Membership checks raise the library's own error type directly.
        def __len__(self) -> int:
            return 1

        def __iter__(self):
            yield "data"

        def __contains__(self, key: object) -> bool:
            raise HyperbrowserError("custom contains failure")

        def __getitem__(self, key: str) -> object:
            return {"markdown": "ok"}

    async def run() -> None:
        client = _AsyncScrapeClient(_ContainsRaisesHyperbrowser())  # type: ignore[arg-type]
        with pytest.raises(
            HyperbrowserError, match="custom contains failure"
        ) as exc_info:
            await WebsiteScrapeTool.async_runnable(client, {"url": "https://example.com"})
        # Not re-wrapped: the original error chain stays empty.
        assert exc_info.value.original_error is None

    asyncio.run(run())
1038+
1039+
1040+
def test_async_scrape_tool_preserves_hyperbrowser_mapping_data_read_failures():
    """A HyperbrowserError raised while reading the mapping's "data" value
    must propagate unchanged rather than being re-wrapped."""

    class _GetitemRaisesHyperbrowser(Mapping[str, object]):
        # Reports a "data" key, but reading it raises the library's own
        # error type directly.
        def __len__(self) -> int:
            return 1

        def __iter__(self):
            yield "data"

        def __contains__(self, key: object) -> bool:
            return key == "data"

        def __getitem__(self, key: str) -> object:
            raise HyperbrowserError("custom data read failure")

    async def run() -> None:
        client = _AsyncScrapeClient(_GetitemRaisesHyperbrowser())  # type: ignore[arg-type]
        with pytest.raises(
            HyperbrowserError, match="custom data read failure"
        ) as exc_info:
            await WebsiteScrapeTool.async_runnable(client, {"url": "https://example.com"})
        # Not re-wrapped: the original error chain stays empty.
        assert exc_info.value.original_error is None

    asyncio.run(run())
1067+
1068+
9511069
def test_async_crawl_tool_rejects_non_list_response_data():
9521070
async def run() -> None:
9531071
client = _AsyncCrawlClient(_Response(data={"invalid": "payload"}))

0 commit comments

Comments
 (0)