Skip to content

Commit fe11913

Browse files
tpellissierclaude
and committed
Fix tuple unpacking for _request() calls returning telemetry data
Update internal _request() call sites to unpack (response, telemetry) tuples after Phase 1 changes. Also use duck typing instead of isinstance(x, list) for OperationResult compatibility. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 7a3e6bc commit fe11913

File tree

3 files changed

+10
-10
lines changed

3 files changed

+10
-10
lines changed

examples/advanced/file_upload.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
227227
f"{odata.api}/EntityDefinitions({meta_id})/Attributes?$select=SchemaName&$filter="
228228
f"SchemaName eq '{schema_name}'"
229229
)
230-
r = backoff(lambda: odata._request("get", url), delays=ATTRIBUTE_VISIBILITY_DELAYS)
230+
r, _ = backoff(lambda: odata._request("get", url), delays=ATTRIBUTE_VISIBILITY_DELAYS)
231231
val = []
232232
try:
233233
val = r.json().get("value", [])
@@ -255,7 +255,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
255255
}
256256
try:
257257
url = f"{odata.api}/EntityDefinitions({meta_id})/Attributes"
258-
backoff(lambda: odata._request("post", url, json=payload), delays=ATTRIBUTE_VISIBILITY_DELAYS)
258+
backoff(lambda: odata._request("post", url, json=payload)[0], delays=ATTRIBUTE_VISIBILITY_DELAYS)
259259
print({f"{key_prefix}_file_attribute_created": True})
260260
time.sleep(2)
261261
return True
@@ -285,7 +285,7 @@ def wait_for_attribute_visibility(logical_name: str, label: str):
285285
time.sleep(delay)
286286
waited += delay
287287
try:
288-
resp = odata._request("get", probe_url)
288+
resp, _ = odata._request("get", probe_url)
289289
try:
290290
resp.json()
291291
except Exception: # noqa: BLE001
@@ -313,7 +313,7 @@ def wait_for_attribute_visibility(logical_name: str, label: str):
313313
payload = {name_attr: "File Sample Record"}
314314
log(f"client.create('{table_schema_name}', payload)")
315315
created_ids = backoff(lambda: client.create(table_schema_name, payload))
316-
if isinstance(created_ids, list) and created_ids:
316+
if created_ids and len(created_ids) > 0:
317317
record_id = created_ids[0]
318318
else:
319319
raise RuntimeError("Unexpected create return; expected list[str] with at least one GUID")
@@ -363,7 +363,7 @@ def get_dataset_info(file_path: Path):
363363
dl_url_single = (
364364
f"{odata.api}/{entity_set}({record_id})/{small_file_attr_logical}/$value" # raw entity_set URL OK
365365
)
366-
resp_single = backoff(lambda: odata._request("get", dl_url_single))
366+
resp_single, _ = backoff(lambda: odata._request("get", dl_url_single))
367367
content_single = resp_single.content or b""
368368
import hashlib # noqa: WPS433
369369

@@ -393,7 +393,7 @@ def get_dataset_info(file_path: Path):
393393
)
394394
)
395395
print({"small_replace_upload_completed": True, "small_replace_source_size": replace_size_small})
396-
resp_single_replace = backoff(lambda: odata._request("get", dl_url_single))
396+
resp_single_replace, _ = backoff(lambda: odata._request("get", dl_url_single))
397397
content_single_replace = resp_single_replace.content or b""
398398
downloaded_hash_replace = hashlib.sha256(content_single_replace).hexdigest() if content_single_replace else None
399399
hash_match_replace = (
@@ -435,7 +435,7 @@ def get_dataset_info(file_path: Path):
435435
dl_url_chunk = (
436436
f"{odata.api}/{entity_set}({record_id})/{chunk_file_attr_logical}/$value" # raw entity_set for download
437437
)
438-
resp_chunk = backoff(lambda: odata._request("get", dl_url_chunk))
438+
resp_chunk, _ = backoff(lambda: odata._request("get", dl_url_chunk))
439439
content_chunk = resp_chunk.content or b""
440440
import hashlib # noqa: WPS433
441441

@@ -464,7 +464,7 @@ def get_dataset_info(file_path: Path):
464464
)
465465
)
466466
print({"chunk_replace_upload_completed": True})
467-
resp_chunk_replace = backoff(lambda: odata._request("get", dl_url_chunk))
467+
resp_chunk_replace, _ = backoff(lambda: odata._request("get", dl_url_chunk))
468468
content_chunk_replace = resp_chunk_replace.content or b""
469469
dst_hash_chunk_replace = hashlib.sha256(content_chunk_replace).hexdigest() if content_chunk_replace else None
470470
hash_match_chunk_replace = (

examples/basic/functional_testing.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -190,7 +190,7 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s
190190
continue
191191
raise
192192

193-
if isinstance(created_ids, list) and created_ids:
193+
if created_ids and len(created_ids) > 0:
194194
record_id = created_ids[0]
195195
print(f"[OK] Record created successfully!")
196196
print(f" Record ID: {record_id}")

src/PowerPlatform/Dataverse/data/_upload.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -145,7 +145,7 @@ def _upload_file_chunk(
145145
headers["If-None-Match"] = "null"
146146
else:
147147
headers["If-Match"] = "*"
148-
r_init = self._request("patch", init_url, headers=headers, data=b"")
148+
r_init, _ = self._request("patch", init_url, headers=headers, data=b"")
149149
location = r_init.headers.get("Location") or r_init.headers.get("location")
150150
if not location:
151151
raise RuntimeError("Missing Location header with sessiontoken for chunked upload")

0 commit comments

Comments (0)