@@ -127,6 +127,74 @@ def test_scrape_tool_wraps_response_data_read_failures():
127127 assert exc_info .value .original_error is not None
128128
129129
def test_scrape_tool_supports_mapping_response_objects():
    """A plain-dict (Mapping) response is accepted and its markdown unwrapped."""
    mapping_client = _SyncScrapeClient(
        {"data": {"markdown": "from response mapping"}}  # type: ignore[arg-type]
    )

    result = WebsiteScrapeTool.runnable(mapping_client, {"url": "https://example.com"})

    assert result == "from response mapping"
137+
def test_scrape_tool_rejects_response_objects_missing_data_field():
    """A mapping response lacking the 'data' key is rejected with HyperbrowserError."""
    bad_client = _SyncScrapeClient(
        {"payload": {"markdown": "missing data"}}  # type: ignore[arg-type]
    )

    expected_message = "scrape tool response must include 'data'"
    with pytest.raises(HyperbrowserError, match=expected_message):
        WebsiteScrapeTool.runnable(bad_client, {"url": "https://example.com"})
146+
def test_scrape_tool_wraps_mapping_response_data_read_failures():
    """Exceptions raised while reading 'data' from a Mapping response are wrapped."""

    class _BrokenResponse(Mapping[str, object]):
        # Advertises exactly one key ('data') but explodes on item access,
        # forcing the tool's read path to hit the failure branch.
        def __iter__(self):
            return iter(("data",))

        def __len__(self) -> int:
            return 1

        def __contains__(self, key: object) -> bool:
            return key == "data"

        def __getitem__(self, key: str) -> object:
            raise RuntimeError("cannot read response data")

    broken_client = _SyncScrapeClient(_BrokenResponse())  # type: ignore[arg-type]

    with pytest.raises(
        HyperbrowserError, match="Failed to read scrape tool response data"
    ) as excinfo:
        WebsiteScrapeTool.runnable(broken_client, {"url": "https://example.com"})

    # The wrapper must keep a reference to the underlying RuntimeError.
    assert excinfo.value.original_error is not None
171+
def test_scrape_tool_wraps_mapping_response_data_inspection_failures():
    """Exceptions raised during the 'data' membership check are wrapped too."""

    class _BrokenContainsResponse(Mapping[str, object]):
        # Item access works, but merely asking "is 'data' present?" raises,
        # exercising the tool's inspection failure branch.
        def __iter__(self):
            return iter(("data",))

        def __len__(self) -> int:
            return 1

        def __contains__(self, key: object) -> bool:
            raise RuntimeError("cannot inspect response")

        def __getitem__(self, key: str) -> object:
            return {"markdown": "ok"}

    contains_client = _SyncScrapeClient(_BrokenContainsResponse())  # type: ignore[arg-type]

    with pytest.raises(
        HyperbrowserError, match="Failed to inspect scrape tool response data field"
    ) as excinfo:
        WebsiteScrapeTool.runnable(contains_client, {"url": "https://example.com"})

    # The wrapper must preserve the original RuntimeError for callers.
    assert excinfo.value.original_error is not None
197+
130198def test_scrape_tool_preserves_hyperbrowser_response_data_read_failures ():
131199 client = _SyncScrapeClient (
132200 _Response (data_error = HyperbrowserError ("custom scrape data failure" ))
@@ -340,6 +408,20 @@ async def run() -> None:
340408 asyncio .run (run ())
341409
342410
def test_async_scrape_tool_supports_mapping_response_objects():
    """The async variant also accepts plain-Mapping responses and unwraps them."""

    async def _exercise() -> None:
        mapping_client = _AsyncScrapeClient(
            {"data": {"markdown": "async response mapping"}}  # type: ignore[arg-type]
        )
        result = await WebsiteScrapeTool.async_runnable(
            mapping_client,
            {"url": "https://example.com"},
        )
        assert result == "async response mapping"

    asyncio.run(_exercise())
424+
343425def test_async_crawl_tool_rejects_non_list_response_data ():
344426 async def run () -> None :
345427 client = _AsyncCrawlClient (_Response (data = {"invalid" : "payload" }))
0 commit comments