From 38b1843074b9d2202a1815f3952c9b9014736100 Mon Sep 17 00:00:00 2001
From: Max Wang
Date: Mon, 24 Nov 2025 15:32:51 -0800
Subject: [PATCH 1/3] retries for examples

---
 examples/advanced/file_upload.py           |  48 ++++-
 examples/advanced/walkthrough.py           |  63 +++++--
 examples/basic/functional_testing.py       | 208 +++++++++++++++++----
 src/PowerPlatform/Dataverse/data/_odata.py |  18 +-
 4 files changed, 265 insertions(+), 72 deletions(-)

diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py
index 9bfae49..830a3ca 100644
--- a/examples/advanced/file_upload.py
+++ b/examples/advanced/file_upload.py
@@ -66,7 +66,7 @@ def log(call: str):
 # Simple SHA-256 helper with caching to avoid re-reading large files multiple times.
 _FILE_HASH_CACHE = {}
-
+ATTRIBUTE_VISIBILITY_DELAYS = (0, 3, 10, 20, 35, 50, 70, 90, 120)
 
 def file_sha256(path: Path):
     # returns (hex_digest, size_bytes)
     try:
@@ -178,12 +178,12 @@ def backoff(op, *, delays=(0, 2, 5, 10), retry_status=(400, 403, 404, 409, 412,
 
 def ensure_table():
     # Check by schema
-    existing = client.get_table_info(TABLE_SCHEMA_NAME)
+    existing = backoff(lambda: client.get_table_info(TABLE_SCHEMA_NAME))
     if existing:
         print({"table": TABLE_SCHEMA_NAME, "existed": True})
         return existing
     log("client.create_table('new_FileSample', schema={'new_Title': 'string'})")
-    info = client.create_table(TABLE_SCHEMA_NAME, {"new_Title": "string"})
+    info = backoff(lambda: client.create_table(TABLE_SCHEMA_NAME, {"new_Title": "string"}))
     print({"table": TABLE_SCHEMA_NAME, "existed": False, "metadata_id": info.get("metadata_id")})
     return info
 
@@ -217,7 +217,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
         f"{odata.api}/EntityDefinitions({meta_id})/Attributes?$select=SchemaName&$filter="
         f"SchemaName eq '{schema_name}'"
     )
-    r = odata._request("get", url)
+    r = backoff(lambda: odata._request("get", url), delays=ATTRIBUTE_VISIBILITY_DELAYS)
    val = []
    try:
        val = r.json().get("value", [])
@@ -245,7 +245,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
     }
     try:
         url = f"{odata.api}/EntityDefinitions({meta_id})/Attributes"
-        r = odata._request("post", url, json=payload)
+        r = backoff(lambda: odata._request("post", url, json=payload), delays=ATTRIBUTE_VISIBILITY_DELAYS)
         print({f"{key_prefix}_file_attribute_created": True})
         time.sleep(2)
         return True
@@ -263,11 +263,39 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
     return False
 
 
+def wait_for_attribute_visibility(logical_name: str, label: str):
+    if not logical_name or not entity_set:
+        return False
+    odata = client._get_odata()
+    probe_url = f"{odata.api}/{entity_set}?$top=1&$select={logical_name}"
+    waited = 0
+    last_error = None
+    for delay in ATTRIBUTE_VISIBILITY_DELAYS:
+        if delay:
+            time.sleep(delay)
+            waited += delay
+        try:
+            resp = odata._request("get", probe_url)
+            try:
+                resp.json()
+            except Exception:  # noqa: BLE001
+                pass
+            if waited:
+                print({f"{label}_attribute_visible_wait_seconds": waited})
+            return True
+        except Exception as ex:  # noqa: BLE001
+            last_error = ex
+            continue
+    raise RuntimeError(f"Timed out waiting for attribute '{logical_name}' to materialize") from last_error
+
+
 # Conditionally ensure each attribute only if its mode is selected
 if run_small:
     ensure_file_attribute_generic(small_file_attr_schema, "Small Document", "small")
+    wait_for_attribute_visibility(small_file_attr_logical, "small")
 if run_chunk:
     ensure_file_attribute_generic(chunk_file_attr_schema, "Chunk Document", "chunk")
+    wait_for_attribute_visibility(chunk_file_attr_logical, "chunk")
 
 # --------------------------- Record create ---------------------------
 record_id = None
@@ -325,7 +353,7 @@ def get_dataset_info(file_path: Path):
     dl_url_single = (
         f"{odata.api}/{entity_set}({record_id})/{small_file_attr_logical}/$value"  # raw entity_set URL OK
     )
-    resp_single = odata._request("get", dl_url_single)
+    resp_single = backoff(lambda: odata._request("get", dl_url_single))
     content_single = resp_single.content or b""
 
     import hashlib  # noqa: WPS433
@@ -355,7 +383,7 @@ def get_dataset_info(file_path: Path):
         )
     )
     print({"small_replace_upload_completed": True, "small_replace_source_size": replace_size_small})
-    resp_single_replace = odata._request("get", dl_url_single)
+    resp_single_replace = backoff(lambda: odata._request("get", dl_url_single))
     content_single_replace = resp_single_replace.content or b""
     downloaded_hash_replace = hashlib.sha256(content_single_replace).hexdigest() if content_single_replace else None
     hash_match_replace = (
@@ -397,7 +425,7 @@ def get_dataset_info(file_path: Path):
     dl_url_chunk = (
         f"{odata.api}/{entity_set}({record_id})/{chunk_file_attr_logical}/$value"  # raw entity_set for download
     )
-    resp_chunk = odata._request("get", dl_url_chunk)
+    resp_chunk = backoff(lambda: odata._request("get", dl_url_chunk))
     content_chunk = resp_chunk.content or b""
 
     import hashlib  # noqa: WPS433
@@ -426,7 +454,7 @@ def get_dataset_info(file_path: Path):
         )
     )
     print({"chunk_replace_upload_completed": True})
-    resp_chunk_replace = odata._request("get", dl_url_chunk)
+    resp_chunk_replace = backoff(lambda: odata._request("get", dl_url_chunk))
     content_chunk_replace = resp_chunk_replace.content or b""
     dst_hash_chunk_replace = hashlib.sha256(content_chunk_replace).hexdigest() if content_chunk_replace else None
     hash_match_chunk_replace = (
@@ -459,7 +487,7 @@ def get_dataset_info(file_path: Path):
 if cleanup_table:
     try:
         log(f"client.delete_table('{TABLE_SCHEMA_NAME}')")
-        client.delete_table(TABLE_SCHEMA_NAME)
+        backoff(lambda: client.delete_table(TABLE_SCHEMA_NAME))
         print({"table_deleted": True})
     except Exception as e:  # noqa: BLE001
         print({"table_deleted": False, "error": str(e)})
diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py
index c8a5c63..de31b21 100644
--- a/examples/advanced/walkthrough.py
+++ b/examples/advanced/walkthrough.py
@@ -19,9 +19,11 @@
 import sys
 import json
+import time
 from enum import IntEnum
 from azure.identity import InteractiveBrowserCredential
 from PowerPlatform.Dataverse.client import DataverseClient
+import requests
 
 
 # Simple logging helper
@@ -36,6 +38,24 @@ class Priority(IntEnum):
     HIGH = 3
 
 
+def backoff(op, *, delays=(0, 2, 5, 10), retry_status=(400, 403, 404, 409, 412, 429, 500, 502, 503, 504)):
+    last = None
+    for delay in delays:
+        if delay:
+            time.sleep(delay)
+        try:
+            return op()
+        except Exception as ex:  # noqa: BLE001
+            last = ex
+            resp = getattr(ex, "response", None)
+            code = getattr(resp, "status_code", None)
+            if isinstance(ex, requests.exceptions.HTTPError) and code in retry_status:
+                continue
+            continue
+    if last:
+        raise last
+
+
 def main():
     print("=" * 80)
     print("Dataverse SDK Walkthrough")
@@ -72,7 +92,7 @@ def main():
     table_name = "new_WalkthroughDemo"
 
     log_call(f"client.get_table_info('{table_name}')")
-    table_info = client.get_table_info(table_name)
+    table_info = backoff(lambda: client.get_table_info(table_name))
 
     if table_info:
         print(f"✓ Table already exists: {table_info.get('table_schema_name')}")
@@ -87,7 +107,7 @@
"new_Completed": "bool", "new_Priority": Priority, } - table_info = client.create_table(table_name, columns) + table_info = backoff(lambda: client.create_table(table_name, columns)) print(f"✓ Created table: {table_info.get('table_schema_name')}") print(f" Columns created: {', '.join(table_info.get('columns_created', []))}") @@ -107,7 +127,7 @@ def main(): "new_Completed": False, "new_Priority": Priority.MEDIUM, } - id1 = client.create(table_name, single_record)[0] + id1 = backoff(lambda: client.create(table_name, single_record))[0] print(f"✓ Created single record: {id1}") # Multiple create @@ -135,7 +155,7 @@ def main(): "new_Priority": Priority.HIGH, }, ] - ids = client.create(table_name, multiple_records) + ids = backoff(lambda: client.create(table_name, multiple_records)) print(f"✓ Created {len(ids)} records: {ids}") # ============================================================================ @@ -147,7 +167,7 @@ def main(): # Single read by ID log_call(f"client.get('{table_name}', '{id1}')") - record = client.get(table_name, id1) + record = backoff(lambda: client.get(table_name, id1)) print("✓ Retrieved single record:") print( json.dumps( @@ -182,13 +202,13 @@ def main(): # Single update log_call(f"client.update('{table_name}', '{id1}', {{...}})") - client.update(table_name, id1, {"new_Quantity": 100}) - updated = client.get(table_name, id1) + backoff(lambda: client.update(table_name, id1, {"new_Quantity": 100})) + updated = backoff(lambda: client.get(table_name, id1)) print(f"✓ Updated single record new_Quantity: {updated.get('new_quantity')}") # Multiple update (broadcast same change) log_call(f"client.update('{table_name}', [{len(ids)} IDs], {{...}})") - client.update(table_name, ids, {"new_Completed": True}) + backoff(lambda: client.update(table_name, ids, {"new_Completed": True})) print(f"✓ Updated {len(ids)} records to new_Completed=True") # ============================================================================ @@ -210,7 +230,7 @@ def main(): } for i in range(1, 21) ] - paging_ids = client.create(table_name, paging_records) + paging_ids = backoff(lambda: client.create(table_name, paging_records)) print(f"✓ Created {len(paging_ids)} records for paging demo") # Query with paging @@ -230,7 +250,7 @@ def main(): log_call(f"client.query_sql('SELECT new_title, new_quantity FROM {table_name} WHERE new_completed = 1')") sql = f"SELECT new_title, new_quantity FROM new_walkthroughdemo WHERE new_completed = 1" try: - results = client.query_sql(sql) + results = backoff(lambda: client.query_sql(sql)) print(f"✓ SQL query returned {len(results)} completed records:") for result in results[:5]: # Show first 5 print(f" - new_Title='{result.get('new_title')}', new_Quantity={result.get('new_quantity')}") @@ -252,8 +272,8 @@ def main(): "new_Completed": False, "new_Priority": "High", # String label instead of int } - label_id = client.create(table_name, label_record)[0] - retrieved = client.get(table_name, label_id) + label_id = backoff(lambda: client.create(table_name, label_record))[0] + retrieved = backoff(lambda: client.get(table_name, label_id)) print(f"✓ Created record with string label 'High' for new_Priority") print(f" new_Priority stored as integer: {retrieved.get('new_priority')}") print(f" new_Priority@FormattedValue: {retrieved.get('new_priority@OData.Community.Display.V1.FormattedValue')}") @@ -266,12 +286,12 @@ def main(): print("=" * 80) log_call(f"client.create_columns('{table_name}', {{'new_Notes': 'string'}})") - created_cols = client.create_columns(table_name, {"new_Notes": 
"string"}) + created_cols = backoff(lambda: client.create_columns(table_name, {"new_Notes": "string"})) print(f"✓ Added column: {created_cols[0]}") # Delete the column we just added log_call(f"client.delete_columns('{table_name}', ['new_Notes'])") - client.delete_columns(table_name, ["new_Notes"]) + backoff(lambda: client.delete_columns(table_name, ["new_Notes"])) print(f"✓ Deleted column: new_Notes") # ============================================================================ @@ -283,12 +303,12 @@ def main(): # Single delete log_call(f"client.delete('{table_name}', '{id1}')") - client.delete(table_name, id1) + backoff(lambda: client.delete(table_name, id1)) print(f"✓ Deleted single record: {id1}") # Multiple delete (delete the paging demo records) log_call(f"client.delete('{table_name}', [{len(paging_ids)} IDs])") - job_id = client.delete(table_name, paging_ids) + job_id = backoff(lambda: client.delete(table_name, paging_ids)) print(f"✓ Bulk delete job started: {job_id}") print(f" (Deleting {len(paging_ids)} paging demo records)") @@ -300,8 +320,15 @@ def main(): print("=" * 80) log_call(f"client.delete_table('{table_name}')") - client.delete_table(table_name) - print(f"✓ Deleted table: {table_name}") + try: + backoff(lambda: client.delete_table(table_name)) + print(f"✓ Deleted table: {table_name}") + except Exception as ex: # noqa: BLE001 + code = getattr(getattr(ex, "response", None), "status_code", None) + if isinstance(ex, requests.exceptions.HTTPError) and code == 404: + print(f"✓ Table removed: {table_name}") + else: + raise # ============================================================================ # SUMMARY diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index 886aca6..9d39eee 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -26,7 +26,7 @@ import sys import time -from typing import Optional, Dict, Any +from typing import Optional, Dict, Any, List from datetime import datetime # Import SDK components (assumes installation is already validated) @@ -73,6 +73,42 @@ def setup_authentication() -> DataverseClient: sys.exit(1) +def wait_for_table_metadata( + client: DataverseClient, + table_schema_name: str, + retries: int = 10, + delay_seconds: int = 3, +) -> Dict[str, Any]: + """Poll until table metadata is published and entity set becomes available.""" + + for attempt in range(1, retries + 1): + try: + info = client.get_table_info(table_schema_name) + if info and info.get("entity_set_name"): + # Check for PrimaryIdAttribute next, make sure it's available + # so subsequent CRUD calls do not hit a cached miss despite table_info succeeding. + odata = client._get_odata() + odata._entity_set_from_schema_name(table_schema_name) + + if attempt > 1: + print( + f" ✅ Table metadata available after {attempt} attempts." + ) + return info + except Exception: + pass + + if attempt < retries: + print( + f" ⏳ Waiting for table metadata to publish (attempt {attempt}/{retries})..." + ) + time.sleep(delay_seconds) + + raise RuntimeError( + "Table metadata did not become available in time. Please retry later." 
+    )
+
+
 def ensure_test_table(client: DataverseClient) -> Dict[str, Any]:
     """Create or verify test table exists."""
     print("\n📋 Test Table Setup")
@@ -109,9 +145,7 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]:
         print(f"   Logical name: {table_info.get('table_logical_name')}")
         print(f"   Entity set: {table_info.get('entity_set_name')}")
 
-        # Wait a moment for table to be ready
-        time.sleep(2)
-        return table_info
+        return wait_for_table_metadata(client, table_schema_name)
 
     except MetadataError as e:
         print(f"❌ Failed to create table: {e}")
@@ -125,6 +159,8 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s
 
     table_schema_name = table_info.get("table_schema_name")
     attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
+    retries = 5
+    delay_seconds = 3
 
     # Create test record data
     test_data = {
@@ -138,7 +174,21 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s
 
     try:
         print("🚀 Creating test record...")
-        created_ids = client.create(table_schema_name, test_data)
+        created_ids: Optional[List[str]] = None
+        for attempt in range(1, retries + 1):
+            try:
+                created_ids = client.create(table_schema_name, test_data)
+                if attempt > 1:
+                    print(f" ✅ Record creation succeeded after {attempt} attempts.")
+                break
+            except HttpError as err:
+                if getattr(err, "status_code", None) == 404 and attempt < retries:
+                    print(
+                        f" ⏳ Table not ready for create (attempt {attempt}/{retries}). Retrying in {delay_seconds}s..."
+                    )
+                    time.sleep(delay_seconds)
+                    continue
+                raise
 
         if isinstance(created_ids, list) and created_ids:
             record_id = created_ids[0]
@@ -165,9 +215,29 @@ def test_read_record(client: DataverseClient, table_info: Dict[str, Any], record
 
     table_schema_name = table_info.get("table_schema_name")
     attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
+    retries = 5
+    delay_seconds = 3
 
     try:
         print(f"🔍 Reading record: {record_id}")
-        record = client.get(table_schema_name, record_id)
+        record = None
+        for attempt in range(1, retries + 1):
+            try:
+                record = client.get(table_schema_name, record_id)
+                if attempt > 1:
+                    print(f" ✅ Record read succeeded after {attempt} attempts.")
+                break
+            except HttpError as err:
+                if getattr(err, "status_code", None) == 404 and attempt < retries:
+                    print(
+                        f" ⏳ Record not queryable yet (attempt {attempt}/{retries}). Retrying in {delay_seconds}s..."
+                    )
+                    time.sleep(delay_seconds)
+                    continue
+                raise
+
+        if record is None:
+            raise RuntimeError("Record did not become available in time.")
 
         if record:
             print("✅ Record retrieved successfully!")
@@ -203,29 +273,40 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N
 
     table_schema_name = table_info.get("table_schema_name")
     attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
+    retries = 5
+    delay_seconds = 3
 
     try:
         print("🔍 Querying records from test table...")
-
-        # Query with filter and select
-        records_iterator = client.get(
-            table_schema_name,
-            select=[f"{attr_prefix}_name", f"{attr_prefix}_count", f"{attr_prefix}_amount"],
-            filter=f"{attr_prefix}_is_active eq true",
-            top=5,
-            orderby=[f"{attr_prefix}_name asc"],
-        )
-
-        record_count = 0
-        for batch in records_iterator:
-            for record in batch:
-                record_count += 1
-                name = record.get(f"{attr_prefix}_name", "N/A")
-                count = record.get(f"{attr_prefix}_count", "N/A")
-                amount = record.get(f"{attr_prefix}_amount", "N/A")
-                print(f"   Record {record_count}: {name} (Count: {count}, Amount: {amount})")
-
-        print(f"✅ Query completed! Found {record_count} active records.")
+        for attempt in range(1, retries + 1):
+            try:
+                records_iterator = client.get(
+                    table_schema_name,
+                    select=[f"{attr_prefix}_name", f"{attr_prefix}_count", f"{attr_prefix}_amount"],
+                    filter=f"{attr_prefix}_is_active eq true",
+                    top=5,
+                    orderby=[f"{attr_prefix}_name asc"],
+                )
+
+                record_count = 0
+                for batch in records_iterator:
+                    for record in batch:
+                        record_count += 1
+                        name = record.get(f"{attr_prefix}_name", "N/A")
+                        count = record.get(f"{attr_prefix}_count", "N/A")
+                        amount = record.get(f"{attr_prefix}_amount", "N/A")
+                        print(f"   Record {record_count}: {name} (Count: {count}, Amount: {amount})")
+
+                print(f"✅ Query completed! Found {record_count} active records.")
+                break
+            except HttpError as err:
+                if getattr(err, "status_code", None) == 404 and attempt < retries:
+                    print(
+                        f" ⏳ Query retry {attempt}/{retries} after metadata 404 ({err}). Waiting {delay_seconds}s..."
+                    )
+                    time.sleep(delay_seconds)
+                    continue
+                raise
 
     except Exception as e:
         print(f"⚠️ Query test encountered an issue: {e}")
@@ -238,16 +319,33 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor
     print("=" * 50)
 
     table_schema_name = table_info.get("table_schema_name")
+    retries = 5
+    delay_seconds = 3
 
     # Ask user if they want to clean up
     cleanup_choice = input("Do you want to delete the test record? (y/N): ").strip().lower()
 
     if cleanup_choice in ["y", "yes"]:
-        try:
-            client.delete(table_schema_name, record_id)
-            print("✅ Test record deleted successfully")
-        except Exception as e:
-            print(f"⚠️ Failed to delete test record: {e}")
+        for attempt in range(1, retries + 1):
+            try:
+                client.delete(table_schema_name, record_id)
+                print("✅ Test record deleted successfully")
+                break
+            except HttpError as err:
+                status = getattr(err, "status_code", None)
+                if status == 404:
+                    print("ℹ️ Record already deleted or not yet available; skipping.")
+                    break
+                if attempt < retries:
+                    print(
+                        f" ⏳ Record delete retry {attempt}/{retries} after error ({err}). Waiting {delay_seconds}s..."
+                    )
+                    time.sleep(delay_seconds)
+                    continue
+                print(f"⚠️ Failed to delete test record: {err}")
+            except Exception as e:
+                print(f"⚠️ Failed to delete test record: {e}")
+                break
     else:
         print("ℹ️ Test record kept for inspection")
 
@@ -255,15 +353,53 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor
     table_cleanup = input("Do you want to delete the test table? (y/N): ").strip().lower()
 
     if table_cleanup in ["y", "yes"]:
-        try:
-            client.delete_table(table_info.get("table_schema_name"))
-            print("✅ Test table deleted successfully")
-        except Exception as e:
-            print(f"⚠️ Failed to delete test table: {e}")
+        for attempt in range(1, retries + 1):
+            try:
+                client.delete_table(table_info.get("table_schema_name"))
+                print("✅ Test table deleted successfully")
+                break
+            except HttpError as err:
+                status = getattr(err, "status_code", None)
+                if status == 404:
+                    if _table_still_exists(client, table_info.get("table_schema_name")):
+                        if attempt < retries:
+                            print(
+                                f" ⏳ Table delete retry {attempt}/{retries} after metadata 404 ({err}). Waiting {delay_seconds}s..."
+                            )
+                            time.sleep(delay_seconds)
+                            continue
+                        print(f"⚠️ Failed to delete test table due to metadata delay: {err}")
+                        break
+                    print("✅ Test table deleted successfully (404 reported).")
+                    break
+                if attempt < retries:
+                    print(
+                        f" ⏳ Table delete retry {attempt}/{retries} after error ({err}). Waiting {delay_seconds}s..."
+                    )
+                    time.sleep(delay_seconds)
+                    continue
+                print(f"⚠️ Failed to delete test table: {err}")
+            except Exception as e:
+                print(f"⚠️ Failed to delete test table: {e}")
+                break
     else:
         print("ℹ️ Test table kept for future testing")
 
 
+def _table_still_exists(client: DataverseClient, table_schema_name: Optional[str]) -> bool:
+    if not table_schema_name:
+        return False
+    try:
+        info = client.get_table_info(table_schema_name)
+        return bool(info and info.get("entity_set_name"))
+    except HttpError as probe_err:
+        if getattr(probe_err, "status_code", None) == 404:
+            return False
+        return True
+    except Exception:
+        return True
+
+
 def main():
     """Main test function."""
     print("🚀 PowerPlatform Dataverse Client SDK - Advanced Functional Testing")
diff --git a/src/PowerPlatform/Dataverse/data/_odata.py b/src/PowerPlatform/Dataverse/data/_odata.py
index 8eda7ad..99b3546 100644
--- a/src/PowerPlatform/Dataverse/data/_odata.py
+++ b/src/PowerPlatform/Dataverse/data/_odata.py
@@ -993,17 +993,19 @@ def _optionset_map(self, table_schema_name: str, attr_logical: str) -> Optional[
             f"{self.api}/EntityDefinitions(LogicalName='{table_schema_name_esc}')/Attributes"
             f"?$filter=LogicalName eq '{attr_esc}'&$select=LogicalName,AttributeType"
         )
-        # Retry up to 3 times on 404 (new or not-yet-published attribute metadata). If still 404, raise.
+        # Retry on 404 (metadata not yet published) before surfacing the error.
         r_type = None
-        for attempt in range(3):
+        max_attempts = 5
+        backoff_seconds = 0.4
+        for attempt in range(1, max_attempts + 1):
             try:
                 r_type = self._request("get", url_type)
                 break
             except HttpError as err:
                 if getattr(err, "status_code", None) == 404:
-                    if attempt < 2:
-                        # Exponential-ish backoff: 0.4s, 0.8s
-                        time.sleep(0.4 * (2**attempt))
+                    if attempt < max_attempts:
+                        # Exponential backoff: 0.4s, 0.8s, 1.6s, ...
+                        time.sleep(backoff_seconds * (2 ** (attempt - 1)))
                     continue
                 raise RuntimeError(
                     f"Picklist attribute metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)"
@@ -1029,14 +1031,14 @@ def _optionset_map(self, table_schema_name: str, attr_logical: str) -> Optional[
         )
         # Step 2 fetch with retries: expanded OptionSet (cast form first)
         r_opts = None
-        for attempt in range(3):
+        for attempt in range(1, max_attempts + 1):
             try:
                 r_opts = self._request("get", cast_url)
                 break
             except HttpError as err:
                 if getattr(err, "status_code", None) == 404:
-                    if attempt < 2:
-                        time.sleep(0.4 * (2**attempt))  # 0.4s, 0.8s
+                    if attempt < max_attempts:
+                        time.sleep(backoff_seconds * (2 ** (attempt - 1)))
                     continue
                 raise RuntimeError(
                     f"Picklist OptionSet metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)"

From 72096e88ad5551da6bdaf042d233a44b3adb2c72 Mon Sep 17 00:00:00 2001
From: Max Wang
Date: Mon, 24 Nov 2025 15:45:13 -0800
Subject: [PATCH 2/3] update per AI comments

---
 examples/advanced/file_upload.py           |  9 ++-------
 examples/advanced/walkthrough.py           | 12 ++++--------
 src/PowerPlatform/Dataverse/data/_odata.py |  2 +-
 3 files changed, 7 insertions(+), 16 deletions(-)

diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py
index 830a3ca..c07c1a5 100644
--- a/examples/advanced/file_upload.py
+++ b/examples/advanced/file_upload.py
@@ -153,7 +153,7 @@ def generate_test_pdf(size_mb: int = 10) -> Path:
     return test_file
 
 
-def backoff(op, *, delays=(0, 2, 5, 10), retry_status=(400, 403, 404, 409, 412, 429, 500, 502, 503, 504)):
+def backoff(op, *, delays=(0, 2, 5, 10)):
     last = None
     for d in delays:
         if d:
@@ -162,11 +162,6 @@ def backoff(op, *, delays=(0, 2, 5, 10), retry_status=(400, 403, 404, 409, 412,
             return op()
         except Exception as ex:  # noqa: BLE001
             last = ex
-            r = getattr(ex, "response", None)
-            code = getattr(r, "status_code", None)
-            if isinstance(ex, requests.exceptions.HTTPError) and code in retry_status:
-                continue
-            # For non-HTTP errors just retry the schedule
             continue
     if last:
         raise last
@@ -245,7 +240,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str)
     }
     try:
         url = f"{odata.api}/EntityDefinitions({meta_id})/Attributes"
-        r = backoff(lambda: odata._request("post", url, json=payload), delays=ATTRIBUTE_VISIBILITY_DELAYS)
+        backoff(lambda: odata._request("post", url, json=payload), delays=ATTRIBUTE_VISIBILITY_DELAYS)
         print({f"{key_prefix}_file_attribute_created": True})
         time.sleep(2)
         return True
diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py
index de31b21..16ca9f3 100644
--- a/examples/advanced/walkthrough.py
+++ b/examples/advanced/walkthrough.py
@@ -38,19 +38,15 @@ class Priority(IntEnum):
     HIGH = 3
 
 
-def backoff(op, *, delays=(0, 2, 5, 10), retry_status=(400, 403, 404, 409, 412, 429, 500, 502, 503, 504)):
+def backoff(op, *, delays=(0, 2, 5, 10)):
     last = None
-    for delay in delays:
-        if delay:
-            time.sleep(delay)
+    for d in delays:
+        if d:
+            time.sleep(d)
         try:
             return op()
         except Exception as ex:  # noqa: BLE001
             last = ex
-            resp = getattr(ex, "response", None)
-            code = getattr(resp, "status_code", None)
-            if isinstance(ex, requests.exceptions.HTTPError) and code in retry_status:
-                continue
             continue
     if last:
         raise last
diff --git a/src/PowerPlatform/Dataverse/data/_odata.py b/src/PowerPlatform/Dataverse/data/_odata.py
index 99b3546..0aa168a 100644
--- a/src/PowerPlatform/Dataverse/data/_odata.py
+++ b/src/PowerPlatform/Dataverse/data/_odata.py
@@ -1004,7 +1004,7 @@ def _optionset_map(self, table_schema_name: str, attr_logical: str) -> Optional[
             except HttpError as err:
                 if getattr(err, "status_code", None) == 404:
                     if attempt < max_attempts:
-                        # Exponential backoff: 0.4s, 0.8s, 1.6s, ...
+                        # Exponential backoff: 0.4s, 0.8s, 1.6s, 3.2s
                         time.sleep(backoff_seconds * (2 ** (attempt - 1)))
                     continue
                 raise RuntimeError(

From 03ff93ed5c9428167a981c14fe19ac18fabed533 Mon Sep 17 00:00:00 2001
From: Max Wang
Date: Mon, 24 Nov 2025 16:32:21 -0800
Subject: [PATCH 3/3] improvements to retry

---
 examples/advanced/file_upload.py | 19 +++++++++++++++++--
 examples/advanced/walkthrough.py | 28 +++++++++++++++++++++++-----
 2 files changed, 40 insertions(+), 7 deletions(-)

diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py
index c07c1a5..60f9495 100644
--- a/examples/advanced/file_upload.py
+++ b/examples/advanced/file_upload.py
@@ -153,17 +153,32 @@ def generate_test_pdf(size_mb: int = 10) -> Path:
     return test_file
 
 
-def backoff(op, *, delays=(0, 2, 5, 10)):
+def backoff(op, *, delays=(0, 2, 5, 10, 20, 20)):
     last = None
+    total_delay = 0
+    attempts = 0
     for d in delays:
         if d:
             time.sleep(d)
+            total_delay += d
+        attempts += 1
         try:
-            return op()
+            result = op()
+            if attempts > 1:
+                retry_count = attempts - 1
+                print(
+                    f" ↺ Backoff succeeded after {retry_count} retry(s); waited {total_delay}s total."
+                )
+            return result
         except Exception as ex:  # noqa: BLE001
             last = ex
             continue
     if last:
+        if attempts:
+            retry_count = max(attempts - 1, 0)
+            print(
+                f" ⚠ Backoff exhausted after {retry_count} retry(s); waited {total_delay}s total."
+            )
         raise last
diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py
index 16ca9f3..7cad0e2 100644
--- a/examples/advanced/walkthrough.py
+++ b/examples/advanced/walkthrough.py
@@ -23,6 +23,7 @@
 from enum import IntEnum
 from azure.identity import InteractiveBrowserCredential
 from PowerPlatform.Dataverse.client import DataverseClient
+from PowerPlatform.Dataverse.core.errors import MetadataError
 import requests
 
 
@@ -38,17 +39,32 @@ class Priority(IntEnum):
     HIGH = 3
 
 
-def backoff(op, *, delays=(0, 2, 5, 10)):
+def backoff(op, *, delays=(0, 2, 5, 10, 20, 20)):
     last = None
+    total_delay = 0
+    attempts = 0
     for d in delays:
         if d:
             time.sleep(d)
+            total_delay += d
+        attempts += 1
         try:
-            return op()
+            result = op()
+            if attempts > 1:
+                retry_count = attempts - 1
+                print(
+                    f" ↺ Backoff succeeded after {retry_count} retry(s); waited {total_delay}s total."
+                )
+            return result
         except Exception as ex:  # noqa: BLE001
             last = ex
             continue
     if last:
+        if attempts:
+            retry_count = max(attempts - 1, 0)
+            print(
+                f" ⚠ Backoff exhausted after {retry_count} retry(s); waited {total_delay}s total."
+            )
         raise last
@@ -183,7 +199,8 @@ def main():
     # Multiple read with filter
     log_call(f"client.get('{table_name}', filter='new_quantity gt 5')")
     all_records = []
-    for page in client.get(table_name, filter="new_quantity gt 5"):
+    records_iterator = backoff(lambda: client.get(table_name, filter="new_quantity gt 5"))
+    for page in records_iterator:
         all_records.extend(page)
     print(f"✓ Found {len(all_records)} records with new_quantity > 5")
     for rec in all_records:
@@ -232,7 +249,8 @@ def main():
     # Query with paging
     log_call(f"client.get('{table_name}', page_size=5)")
     print("Fetching records with page_size=5...")
-    for page_num, page in enumerate(client.get(table_name, orderby=["new_Quantity"], page_size=5), start=1):
+    paging_iterator = backoff(lambda: client.get(table_name, orderby=["new_Quantity"], page_size=5))
+    for page_num, page in enumerate(paging_iterator, start=1):
         record_ids = [r.get("new_walkthroughdemoid")[:8] + "..." for r in page]
         print(f"  Page {page_num}: {len(page)} records - IDs: {record_ids}")
 
@@ -321,7 +339,7 @@ def main():
         print(f"✓ Deleted table: {table_name}")
     except Exception as ex:  # noqa: BLE001
         code = getattr(getattr(ex, "response", None), "status_code", None)
-        if isinstance(ex, requests.exceptions.HTTPError) and code == 404:
+        if (isinstance(ex, (requests.exceptions.HTTPError, MetadataError)) and code == 404):
            print(f"✓ Table removed: {table_name}")
         else:
             raise
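-- 
Note for readers adapting these examples: stripped of the progress reporting
that patch 3 layers on top, the retry wrapper this series converges on reduces
to the minimal sketch below. `client.get_table_info` in the usage line is just
a stand-in for any call that can fail transiently while Dataverse metadata
propagates.

    import time

    def backoff(op, *, delays=(0, 2, 5, 10, 20, 20)):
        # Attempt op() once per entry in the delay schedule; the leading 0
        # makes the first attempt immediate.
        last = None
        for d in delays:
            if d:
                time.sleep(d)  # wait before this attempt
            try:
                return op()
            except Exception as ex:
                last = ex  # remember the failure, move to the next attempt
        if last:
            raise last  # schedule exhausted; surface the final error

    # Usage: wrap the call in a zero-argument lambda.
    # table_info = backoff(lambda: client.get_table_info("new_WalkthroughDemo"))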