diff --git a/README.md b/README.md index 055e31a..1d34165 100644 --- a/README.md +++ b/README.md @@ -99,11 +99,12 @@ The SDK provides a simple, pythonic interface for Dataverse operations: | Concept | Description | |---------|-------------| | **DataverseClient** | Main entry point for all operations with environment connection | -| **Records** | Dataverse records represented as Python dictionaries with logical field names | -| **Logical Names** | Use table logical names (`"account"`) and column logical names (`"name"`) | +| **Records** | Dataverse records represented as Python dictionaries with column schema names | +| **Schema names** | Use table schema names (`"account"`, `"new_MyTestTable"`) and column schema names (`"name"`, `"new_MyTestColumn"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic information | +| **Customization prefix values** | Custom tables and columns require a customization prefix value to be included for all operations (e.g., `"new_MyTestTable"`, not `"MyTestTable"`). 
See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | ## Examples @@ -175,40 +176,75 @@ for record in results: print(record["name"]) # OData query with paging +# Note: filter and expand parameters are case sensitive pages = client.get( "account", - select=["accountid", "name"], - filter="statecode eq 0", + select=["accountid", "name"], # select is case-insensitive (automatically lowercased) + filter="statecode eq 0", # filter must use lowercase logical names (not transformed) top=100 ) for page in pages: for record in page: print(record["name"]) + +# Query with navigation property expansion (case-sensitive!) +pages = client.get( + "account", + select=["name"], + expand=["primarycontactid"], # Navigation property names are case-sensitive + filter="statecode eq 0" # Column names must be lowercase logical names +) +for page in pages: + for account in page: + contact = account.get("primarycontactid", {}) + print(f"{account['name']} - Contact: {contact.get('fullname', 'N/A')}") ``` +> **Important**: When using `filter` and `expand` parameters: +> - **`filter`**: Column names must use exact lowercase logical names (e.g., `"statecode eq 0"`, not `"StateCode eq 0"`) +> - **`expand`**: Navigation property names are case-sensitive and must match the exact server names +> - **`select`** and **`orderby`**: Case-insensitive; automatically converted to lowercase + ### Table management ```python -# Create a custom table -table_info = client.create_table("Product", { - "code": "string", - "price": "decimal", - "active": "bool" +# Create a custom table, including the customization prefix value in the schema names for the table and columns. 
+table_info = client.create_table("new_Product", { + "new_Code": "string", + "new_Price": "decimal", + "new_Active": "bool" }) -# Add columns to existing table -client.create_columns("Product", {"category": "string"}) +# Create with custom primary column name and solution assignment +table_info = client.create_table( + table_schema_name="new_Product", + columns={ + "new_Code": "string", + "new_Price": "decimal" + }, + solution_unique_name="MyPublisher", # Optional: add to specific solution + primary_column_schema_name="new_ProductName" # Optional: custom primary column (default is "{customization prefix value}_Name") +) + +# Add columns to existing table (columns must include customization prefix value) +client.create_columns("new_Product", {"new_Category": "string"}) + +# Remove columns +client.delete_columns("new_Product", ["new_Category"]) # Clean up -client.delete_table("Product") +client.delete_table("new_Product") ``` +> **Important**: All custom column names must include the customization prefix value (e.g., `"new_"`). +> This ensures explicit, predictable naming and aligns with Dataverse metadata requirements. + ### File operations ```python # Upload a file to a record client.upload_file( - logical_name="account", + table_schema_name="account", record_id=account_id, file_name_attribute="new_document", path="/path/to/document.pdf" diff --git a/examples/advanced/complete_walkthrough.py b/examples/advanced/complete_walkthrough.py deleted file mode 100644 index 948fced..0000000 --- a/examples/advanced/complete_walkthrough.py +++ /dev/null @@ -1,686 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -""" -PowerPlatform Dataverse Client - Complete SDK Walkthrough - -This comprehensive example demonstrates advanced usage of the PowerPlatform-Dataverse-Client SDK -including all major features and production-ready patterns. 
- -Features Demonstrated: -- Authentication setup and connection management -- Table creation with custom schemas and enums -- Single and bulk record operations (CRUD) -- Advanced querying with SQL and OData -- Paging and batch processing -- Column metadata management -- Multi-language label support -- Error handling and retry patterns -- Interactive cleanup options - -Prerequisites: - pip install PowerPlatform-Dataverse-Client - pip install azure-identity - -For local development, you can also run from source by uncommenting the sys.path line below. - -Note: This is a comprehensive demonstration. For basic installation validation, - use examples/basic/installation_example.py first. -""" - -import sys -from pathlib import Path -import os -from typing import Optional - -# Uncomment for local development from source -# sys.path.append(str(Path(__file__).resolve().parents[2] / "src")) - -from PowerPlatform.Dataverse import DataverseClient -from PowerPlatform.Dataverse.core.errors import MetadataError -from enum import IntEnum -from azure.identity import InteractiveBrowserCredential -import traceback -import requests -import time -from datetime import date, timedelta - - -entered = input("Enter Dataverse org URL (e.g. https://yourorg.crm.dynamics.com): ").strip() -if not entered: - print("No URL entered; exiting.") - sys.exit(1) - -base_url = entered.rstrip('/') -delete_choice = input("Delete the new_SampleItem table at end? (Y/n): ").strip() or "y" -delete_table_at_end = (str(delete_choice).lower() in ("y", "yes", "true", "1")) -# Ask once whether to pause between steps during this run -pause_choice = input("Pause between test steps? 
(y/N): ").strip() or "n" -pause_between_steps = (str(pause_choice).lower() in ("y", "yes", "true", "1")) -# Create a credential we can reuse (for DataverseClient) -credential = InteractiveBrowserCredential() -client = DataverseClient(base_url=base_url, credential=credential) - -# Small helpers: call logging and step pauses -def log_call(call: str) -> None: - print({"call": call}) - -def pause(next_step: str) -> None: - if pause_between_steps: - try: - input(f"\nNext: {next_step} — press Enter to continue...") - except EOFError: - # If stdin is not available, just proceed - pass - -# Small generic backoff helper used only in this quickstart -# Include common transient statuses like 429/5xx to improve resilience. -def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403, 404, 409, 412, 429, 500, 502, 503, 504), retry_if=None): - last_exc = None - for delay in delays: - if delay: - time.sleep(delay) - try: - return op() - except Exception as ex: - print(f'Request failed: {ex}') - last_exc = ex - if retry_if and retry_if(ex): - print("Retrying operation...") - continue - if isinstance(ex, requests.exceptions.HTTPError): - code = getattr(getattr(ex, 'response', None), 'status_code', None) - if code in retry_http_statuses: - print("Retrying operation...") - continue - break - if last_exc: - raise last_exc - -# Enum demonstrating local option set creation with multilingual labels (for French labels to work, enable French language in the environment first) -class Status(IntEnum): - Active = 1 - Inactive = 2 - Archived = 5 - __labels__ = { - 1033: { - "Active": "Active", - "Inactive": "Inactive", - "Archived": "Archived", - }, - 1036: { - "Active": "Actif", - "Inactive": "Inactif", - "Archived": "Archivé", - } - } - -print("Ensure custom table exists (Metadata):") -table_info = None -created_this_run = False - -# Check for existing table using list_tables -log_call("client.list_tables()") -tables = client.list_tables() -existing_table = next((t for t 
in tables if t.get("SchemaName") == "new_SampleItem"), None) -if existing_table: - table_info = client.get_table_info("new_SampleItem") - created_this_run = False - print({ - "table": table_info.get("entity_schema"), - "existed": True, - "entity_set": table_info.get("entity_set_name"), - "logical": table_info.get("entity_logical_name"), - "metadata_id": table_info.get("metadata_id"), - }) - -else: - # Create it since it doesn't exist - try: - log_call("client.create_table('new_SampleItem', schema={code,count,amount,when,active,status})") - table_info = client.create_table( - "new_SampleItem", - { - "code": "string", - "count": "int", - "amount": "decimal", - "when": "datetime", - "active": "bool", - "status": Status, - }, - ) - created_this_run = True if table_info and table_info.get("columns_created") else False - print({ - "table": table_info.get("entity_schema") if table_info else None, - "existed": False, - "entity_set": table_info.get("entity_set_name") if table_info else None, - "logical": table_info.get("entity_logical_name") if table_info else None, - "metadata_id": table_info.get("metadata_id") if table_info else None, - }) - except Exception as e: - # Print full stack trace and any HTTP response details if present - print("Create table failed:") - traceback.print_exc() - resp = getattr(e, 'response', None) - if resp is not None: - try: - print({ - "status": resp.status_code, - "url": getattr(resp, 'url', None), - "body": resp.text[:2000] if getattr(resp, 'text', None) else None, - }) - except Exception: - pass - # Fail fast: all operations must use the custom table - sys.exit(1) -entity_schema = table_info.get("entity_schema") or "new_SampleItem" -logical = table_info.get("entity_logical_name") -metadata_id = table_info.get("metadata_id") -if not metadata_id: - refreshed_info = client.get_table_info(entity_schema) or {} - metadata_id = refreshed_info.get("metadata_id") - if metadata_id: - table_info["metadata_id"] = metadata_id - -# Derive attribute 
logical name prefix from the entity logical name (segment before first underscore) -attr_prefix = logical.split("_", 1)[0] if "_" in logical else logical -code_key = f"{attr_prefix}_code" -count_key = f"{attr_prefix}_count" -amount_key = f"{attr_prefix}_amount" -when_key = f"{attr_prefix}_when" -status_key = f"{attr_prefix}_status" -id_key = f"{logical}id" - -def summary_from_record(rec: dict) -> dict: - return { - "code": rec.get(code_key), - "count": rec.get(count_key), - "amount": rec.get(amount_key), - "when": rec.get(when_key), - } - -def print_line_summaries(label: str, summaries: list[dict]) -> None: - print(label) - for s in summaries: - print( - f" - id={s.get('id')} code={s.get('code')} " - f"count={s.get('count')} amount={s.get('amount')} when={s.get('when')}" - ) - -def _has_installed_language(base_url: str, credential, lcid: int) -> bool: - try: - token = credential.get_token(f"{base_url}/.default").token - url = f"{base_url}/api/data/v9.2/RetrieveAvailableLanguages()" - headers = {"Authorization": f"Bearer {token}", "Accept": "application/json"} - resp = requests.get(url, headers=headers, timeout=15) - if not resp.ok: - return False - data = resp.json() if resp.content else {} - langs: list[int] = [] - for val in data.values(): - if isinstance(val, list) and val and all(isinstance(x, int) for x in val): - langs = val - break - print({"lang_check": {"endpoint": url, "status": resp.status_code, "found": langs, "using": lcid in langs}}) - return lcid in langs - except Exception: - return False - -# if French language (1036) is installed, we use labels in both English and French -use_french_labels = _has_installed_language(base_url, credential, 1036) -if use_french_labels: - print({"labels_language": "fr", "note": "French labels in use."}) -else: - print({"labels_language": "en", "note": "Using English (and numeric values)."}) - -# 2) Create a record in the new table -print("Create records (OData) demonstrating single create and bound CreateMultiple 
(multi):") - -# Define base payloads -single_payload = { - f"{attr_prefix}_name": "Sample A", - code_key: "X001", - count_key: 42, - amount_key: 123.45, - when_key: "2025-01-01", - f"{attr_prefix}_active": True, - status_key: ("Actif" if use_french_labels else Status.Active.value), -} -# Generate multiple payloads -# Distribution update: roughly one-third English labels, one-third French labels, one-third raw integer values. -# We cycle per record: index % 3 == 1 -> English label, == 2 -> French label (if available, else English), == 0 -> integer value. -multi_payloads: list[dict] = [] -base_date = date(2025, 1, 2) -# Fixed 6-step cycle pattern encapsulated in helper: Active, Inactive, Actif, Inactif, 1, 2 (repeat) -def _status_value_for_index(idx: int, use_french: bool): - pattern = [ - ("label", "Active"), - ("label", "Inactive"), - ("fr_label", "Actif"), - ("fr_label", "Inactif"), - ("int", Status.Active.value), - ("int", Status.Inactive.value), - ] - kind, raw = pattern[(idx - 1) % len(pattern)] - if kind == "label": - return raw - if kind == "fr_label": - if use_french: - return raw - return "Active" if raw == "Actif" else "Inactive" - return raw - -for i in range(1, 16): - multi_payloads.append({ - f"{attr_prefix}_name": f"Sample {i:02d}", - code_key: f"X{200 + i:03d}", - count_key: 5 * i, - amount_key: round(10.0 * i, 2), - when_key: (base_date + timedelta(days=i - 1)).isoformat(), - f"{attr_prefix}_active": True, - status_key: _status_value_for_index(i, use_french_labels), - }) - -record_ids: list[str] = [] - -try: - # Single create returns list[str] (length 1) - log_call(f"client.create('{logical}', single_payload)") - single_ids = backoff_retry(lambda: client.create(logical, single_payload)) - if not (isinstance(single_ids, list) and len(single_ids) == 1): - raise RuntimeError("Unexpected single create return shape (expected one-element list)") - record_ids.extend(single_ids) - - # Multi create returns list[str] - log_call(f"client.create('{logical}', 
multi_payloads)") - multi_ids = backoff_retry(lambda: client.create(logical, multi_payloads)) - if isinstance(multi_ids, list): - record_ids.extend([mid for mid in multi_ids if isinstance(mid, str)]) - else: - print({"multi_unexpected_type": type(multi_ids).__name__, "value_preview": str(multi_ids)[:300]}) - - print({"entity": logical, "created_ids": record_ids}) - print_line_summaries("Created record summaries (IDs only; representation not fetched):", [{"id": rid} for rid in record_ids[:1]]) -except Exception as e: - # Surface detailed info for debugging (especially multi-create failures) - print(f"Create failed: {e}") - resp = getattr(e, 'response', None) - if resp is not None: - try: - print({ - 'status': resp.status_code, - 'url': getattr(resp, 'url', None), - 'body': resp.text[:2000] if getattr(resp, 'text', None) else None, - 'headers': {k: v for k, v in getattr(resp, 'headers', {}).items() if k.lower() in ('request-id','activityid','dataverse-instanceversion','content-type')} - }) - except Exception: - pass - sys.exit(1) - -pause("Next: Read record") - -# 3) Read record via OData -print("Read (OData):") -try: - if record_ids: - # Read only the first record and move on - target = record_ids[0] - log_call(f"client.get('{logical}', '{target}')") - rec = backoff_retry(lambda: client.get(logical, target)) - print_line_summaries("Read record summary:", [{"id": target, **summary_from_record(rec)}]) - else: - raise RuntimeError("No record created; skipping read.") -except Exception as e: - print(f"Get failed: {e}") -# 3.5) Update record, then read again and verify -print("Update (OData) and verify:") -# Show what will be updated and planned update calls, then pause -try: - if not record_ids: - raise RuntimeError("No record created; skipping update.") - - update_data = { - f"{attr_prefix}_code": "X002", - f"{attr_prefix}_count": 99, - f"{attr_prefix}_amount": 543.21, - f"{attr_prefix}_when": "2025-02-02", - f"{attr_prefix}_active": False, - status_key: ("Inactif" if 
use_french_labels else Status.Inactive.value), - } - expected_checks = { - f"{attr_prefix}_code": "X002", - f"{attr_prefix}_count": 99, - f"{attr_prefix}_active": False, - status_key: Status.Inactive.value, - } - amount_key = f"{attr_prefix}_amount" - - # Describe what is changing - print( - { - "updating_to": { - code_key: update_data[code_key], - count_key: update_data[count_key], - amount_key: update_data[amount_key], - when_key: update_data[when_key], - } - } - ) - - # Choose a single target to update to keep other records different - target_id = record_ids[0] - pause("Execute Update") - - # Update only the chosen record and summarize - log_call(f"client.update('{logical}', '{target_id}', update_data)") - # Perform update (returns None); follow-up read to verify - backoff_retry(lambda: client.update(logical, target_id, update_data)) - verify_rec = backoff_retry(lambda: client.get(logical, target_id)) - for k, v in expected_checks.items(): - assert verify_rec.get(k) == v, f"Field {k} expected {v}, got {verify_rec.get(k)}" - got = verify_rec.get(amount_key) - got_f = float(got) if got is not None else None - assert got_f is not None and abs(got_f - 543.21) < 1e-6, f"Field {amount_key} expected 543.21, got {got}" - print({"entity": logical, "updated": True}) - print_line_summaries("Updated record summary:", [{"id": target_id, **summary_from_record(verify_rec)}]) -except Exception as e: - print(f"Update/verify failed: {e}") - sys.exit(1) - -# 3.6) Bulk update (UpdateMultiple) demo: update count field on up to first 5 remaining records -print("Bulk update (UpdateMultiple) demo:") -try: - if len(record_ids) > 1: - # Prepare a small subset to update (skip the first already updated one) - subset = record_ids[1:6] - bulk_updates = [] - for idx, rid in enumerate(subset, start=1): - # Simple deterministic changes so user can observe - bulk_updates.append({ - id_key: rid, - count_key: 100 + idx, # new count values - }) - log_call(f"client.update('{logical}', 
<{len(bulk_updates)} ids>, )") - # Unified update handles multiple via list of patches (returns None) - backoff_retry(lambda: client.update(logical, subset, bulk_updates)) - print({"bulk_update_requested": len(bulk_updates), "bulk_update_completed": True}) - # Verify the updated count values by refetching the subset - verification = [] - # Small delay to reduce risk of any brief replication delay - time.sleep(1) - for rid in subset: - rec = backoff_retry(lambda rid=rid: client.get(logical, rid)) - verification.append({ - "id": rid, - "count": rec.get(count_key), - }) - print({"bulk_update_verification": verification}) - else: - print({"bulk_update_skipped": True, "reason": "not enough records"}) -except Exception as e: - print(f"Bulk update failed: {e}") - -# 4) Query records via SQL (?sql parameter)) -print("Query (SQL via ?sql query parameter):") -try: - import time - pause("Execute SQL Query") - - def _run_query(): - cols = f"{id_key}, {code_key}, {amount_key}, {when_key}" - query = f"SELECT TOP 2 {cols} FROM {logical} ORDER BY {attr_prefix}_amount DESC" - log_call(f"client.query_sql(\"{query}\") (Web API ?sql=)") - return client.query_sql(query) - - def _retry_if(ex: Exception) -> bool: - msg = str(ex) if ex else "" - return ("Invalid table name" in msg) or ("Invalid object name" in msg) - - rows = backoff_retry(_run_query, delays=(0, 2, 5), retry_http_statuses=(), retry_if=_retry_if) - id_key = f"{logical}id" - ids = [r.get(id_key) for r in rows if isinstance(r, dict) and r.get(id_key)] - print({"entity": logical, "rows": len(rows) if isinstance(rows, list) else 0, "ids": ids}) - record_summaries = [] - for row in rows if isinstance(rows, list) else []: - record_summaries.append( - { - "id": row.get(id_key), - "code": row.get(code_key), - "count": row.get(count_key), - "amount": row.get(amount_key), - "when": row.get(when_key), - } - ) - print_line_summaries("SQL record summaries (top 2 by amount):", record_summaries) -except Exception as e: - print(f"SQL 
query failed: {e}") - -# Pause between SQL query and retrieve-multiple demos -pause("Retrieve multiple (OData paging demos)") - -# 4.5) Retrieve multiple via OData paging (scenarios) -def run_paging_demo(label: str, *, top: Optional[int], page_size: Optional[int]) -> None: - print("") - print({"paging_demo": label, "top": top, "page_size": page_size}) - total = 0 - page_index = 0 - _select = [id_key, code_key, amount_key, when_key, status_key] - _orderby = [f"{code_key} asc"] - for page in client.get( - logical, - select=_select, - filter=None, - orderby=_orderby, - top=top, - expand=None, - page_size=page_size, - ): - page_index += 1 - total += len(page) - print({ - "page": page_index, - "page_size": len(page), - "sample": [ - { - "id": r.get(id_key), - "code": r.get(code_key), - "amount": r.get(amount_key), - "when": r.get(when_key), - "status": r.get(status_key), - } - for r in page[:5] - ], - }) - print({"paging_demo_done": label, "pages": page_index, "total_rows": total}) - print("") - -print("") -print("==============================") -print("Retrieve multiple (OData paging demos)") -print("==============================") -try: - # 1) Tiny page size, no top: force multiple pages - run_paging_demo("page_size=2 (no top)", top=None, page_size=2) - pause("Next paging demo: top=3, page_size=2") - - # 2) Limit total results while keeping small pages - run_paging_demo("top=3, page_size=2", top=3, page_size=2) - pause("Next paging demo: top=2 (default page size)") - - # 3) Limit total results with default server page size (likely one page) - run_paging_demo("top=2 (default page size)", top=2, page_size=None) -except Exception as e: - print(f"Retrieve multiple demos failed: {e}") -# 5) Delete record -print("Delete (OData):") -# Show deletes to be executed (single + bulk) -if 'record_ids' in locals() and record_ids: - print({"delete_count": len(record_ids)}) -pause("Execute Delete (single then bulk)") -try: - if record_ids: - single_target = record_ids[0] - 
rest_targets = record_ids[1:] - single_error: Optional[str] = None - bulk_job_id: Optional[str] = None - bulk_error: Optional[str] = None - - try: - log_call(f"client.delete('{logical}', '{single_target}')") - backoff_retry(lambda: client.delete(logical, single_target)) - except Exception as ex: - single_error = str(ex) - - half = max(1, len(rest_targets) // 2) - bulk_targets = rest_targets[:half] - sequential_targets = rest_targets[half:] - bulk_error = None - sequential_error = None - - # Fire-and-forget bulk delete for the first portion - try: - log_call(f"client.delete('{logical}', <{len(bulk_targets)} ids>, use_bulk_delete=True)") - bulk_job_id = client.delete(logical, bulk_targets) - except Exception as ex: - bulk_error = str(ex) - - # Sequential deletes for the remainder - try: - log_call(f"client.delete('{logical}', <{len(sequential_targets)} ids>, use_bulk_delete=False)") - for rid in sequential_targets: - backoff_retry(lambda rid=rid: client.delete(logical, rid, use_bulk_delete=False)) - except Exception as ex: - sequential_error = str(ex) - - print({ - "entity": logical, - "delete_single": { - "id": single_target, - "error": single_error, - }, - "delete_bulk": { - "count": len(bulk_targets), - "job_id": bulk_job_id, - "error": bulk_error, - }, - "delete_sequential": { - "count": len(sequential_targets), - "error": sequential_error, - }, - }) - else: - raise RuntimeError("No record created; skipping delete.") -except Exception as e: - print(f"Delete failed: {e}") - -pause("Next: column metadata helpers") - -# 6) Column metadata helpers: column create/delete -print("Column metadata helpers (create/delete column):") -scratch_column = f"scratch_{int(time.time())}" -column_payload = {scratch_column: "string"} -try: - log_call(f"client.create_column('{entity_schema}', {repr(column_payload)})") - column_create = client.create_columns(entity_schema, column_payload) - if not isinstance(column_create, list) or not column_create: - raise RuntimeError("create_column 
did not return schema list") - created_details = column_create - if not all(isinstance(item, str) for item in created_details): - raise RuntimeError("create_column entries were not schema strings") - attribute_schema = created_details[0] - odata_client = client._get_odata() - exists_after_create = None - exists_after_delete = None - attr_type_before = None - if metadata_id and attribute_schema: - _ready_message = "Column metadata not yet available" - def _metadata_after_create(): - meta = odata_client._get_attribute_metadata( - metadata_id, - attribute_schema, - extra_select="@odata.type,AttributeType", - ) - if not meta or not meta.get("MetadataId"): - raise RuntimeError(_ready_message) - return meta - - ready_meta = backoff_retry( - _metadata_after_create, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: isinstance(exc, RuntimeError) and str(exc) == _ready_message, - ) - exists_after_create = bool(ready_meta) - raw_type = ready_meta.get("@odata.type") or ready_meta.get("AttributeType") - if isinstance(raw_type, str): - attr_type_before = raw_type - lowered = raw_type.lower() - delete_target = attribute_schema or scratch_column - log_call(f"client.delete_column('{entity_schema}', '{delete_target}')") - - def _delete_column(): - return client.delete_columns(entity_schema, delete_target) - - column_delete = backoff_retry( - _delete_column, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: ( - isinstance(exc, MetadataError) - or "not found" in str(exc).lower() - or "not yet available" in str(exc).lower() - ), - ) - if not isinstance(column_delete, list) or not column_delete: - raise RuntimeError("delete_column did not return schema list") - deleted_details = column_delete - if not all(isinstance(item, str) for item in deleted_details): - raise RuntimeError("delete_column entries were not schema strings") - if attribute_schema not in deleted_details: - raise RuntimeError("delete_column response missing expected schema 
name") - if metadata_id and attribute_schema: - _delete_message = "Column metadata still present after delete" - def _ensure_removed(): - meta = odata_client._get_attribute_metadata(metadata_id, attribute_schema) - if meta: - raise RuntimeError(_delete_message) - return True - - removed = backoff_retry( - _ensure_removed, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: isinstance(exc, RuntimeError) and str(exc) == _delete_message, - ) - exists_after_delete = not removed - print({ - "created_column": scratch_column, - "create_summary": created_details, - "delete_summary": deleted_details, - "attribute_type_before_delete": attr_type_before, - "exists_after_create": exists_after_create, - "exists_after_delete": exists_after_delete, - }) -except MetadataError as meta_err: - print({"column_metadata_error": str(meta_err)}) -except Exception as exc: - print({"column_metadata_unexpected": str(exc)}) - -pause("Next: Cleanup table") - -# 7) Cleanup: delete the custom table if it exists -print("Cleanup (Metadata):") -if delete_table_at_end: - try: - log_call("client.get_table_info('new_SampleItem')") - info = client.get_table_info("new_SampleItem") - if info: - log_call("client.delete_table('new_SampleItem')") - client.delete_table("new_SampleItem") - print({"table_deleted": True}) - else: - print({"table_deleted": False, "reason": "not found"}) - except Exception as e: - print(f"Delete table failed: {e}") -else: - print({"table_deleted": False, "reason": "user opted to keep table"}) diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 63e9f06..24a5851 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -166,7 +166,6 @@ def backoff(op, *, delays=(0,2,5,10), retry_status=(400,403,404,409,412,429,500, # --------------------------- Table ensure --------------------------- TABLE_SCHEMA_NAME = "new_FileSample" -# If user wants new publisher prefix / naming, adjust above. 
def ensure_table(): # Check by schema @@ -174,8 +173,8 @@ def ensure_table(): if existing: print({"table": TABLE_SCHEMA_NAME, "existed": True}) return existing - log("client.create_table('new_FileSample', schema={title})") - info = client.create_table(TABLE_SCHEMA_NAME, {"title": "string"}) + log("client.create_table('new_FileSample', schema={'new_Title': 'string'})") + info = client.create_table(TABLE_SCHEMA_NAME, {"new_Title": "string"}) print({"table": TABLE_SCHEMA_NAME, "existed": False, "metadata_id": info.get('metadata_id')}) return info @@ -187,8 +186,8 @@ def ensure_table(): sys.exit(1) entity_set = table_info.get("entity_set_name") -logical = table_info.get("entity_logical_name") or entity_set.rstrip("s") -attr_prefix = logical.split('_',1)[0] if '_' in logical else logical +table_schema_name = table_info.get("table_schema_name") +attr_prefix = table_schema_name.split('_',1)[0] if '_' in table_schema_name else table_schema_name name_attr = f"{attr_prefix}_name" small_file_attr_schema = f"{attr_prefix}_SmallDocument" # second file attribute for small single-request demo small_file_attr_logical = f"{attr_prefix}_smalldocument" # expected logical name (lowercase) @@ -258,13 +257,13 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str) record_id = None try: payload = {name_attr: "File Sample Record"} - log(f"client.create('{logical}', payload)") - created_ids = backoff(lambda: client.create(logical, payload)) + log(f"client.create('{table_schema_name}', payload)") + created_ids = backoff(lambda: client.create(table_schema_name, payload)) if isinstance(created_ids, list) and created_ids: record_id = created_ids[0] else: raise RuntimeError("Unexpected create return; expected list[str] with at least one GUID") - print({"record_created": True, "id": record_id, "logical": logical}) + print({"record_created": True, "id": record_id, "table schema name": table_schema_name}) except Exception as e: # noqa: BLE001 print({"record_created": 
False, "error": str(e)}) sys.exit(1) @@ -295,7 +294,7 @@ def get_dataset_info(file_path: Path): try: DATASET_FILE, small_file_size, src_hash = get_dataset_info(_GENERATED_TEST_FILE) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, small_file_attr_logical, str(DATASET_FILE), @@ -322,7 +321,7 @@ def get_dataset_info(file_path: Path): print("Small single-request upload demo - REPLACE with 8MB file:") replacement_file, replace_size_small, replace_hash_small = get_dataset_info(_GENERATED_TEST_FILE_8MB) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, small_file_attr_logical, str(replacement_file), @@ -350,7 +349,7 @@ def get_dataset_info(file_path: Path): try: DATASET_FILE, src_size_chunk, src_hash_chunk = get_dataset_info(_GENERATED_TEST_FILE) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, chunk_file_attr_logical, str(DATASET_FILE), @@ -376,7 +375,7 @@ def get_dataset_info(file_path: Path): print("Streaming chunk upload demo - REPLACE with 8MB file:") replacement_file, replace_size_chunk, replace_hash_chunk = get_dataset_info(_GENERATED_TEST_FILE_8MB) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, chunk_file_attr_logical, str(replacement_file), @@ -401,8 +400,8 @@ def get_dataset_info(file_path: Path): # --------------------------- Cleanup --------------------------- if cleanup_record and record_id: try: - log(f"client.delete('{entity_set}', '{record_id}')") - backoff(lambda: client.delete(entity_set, record_id)) + log(f"client.delete('{table_schema_name}', '{record_id}')") + backoff(lambda: client.delete(table_schema_name, record_id)) print({"record_deleted": True}) except Exception as e: # noqa: BLE001 print({"record_deleted": False, "error": str(e)}) diff --git a/examples/advanced/pandas_integration.py b/examples/advanced/pandas_integration.py index fdd3a86..fc62e06 100644 --- a/examples/advanced/pandas_integration.py +++ 
b/examples/advanced/pandas_integration.py @@ -69,15 +69,15 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 created_this_run = False # First check for existing table -existing = client.get_table_info("SampleItem") +existing = client.get_table_info("new_SampleItem") if existing: table_info = existing created_this_run = False print({ - "table": table_info.get("entity_schema"), + "table": table_info.get("table_schema_name"), "existed": True, "entity_set": table_info.get("entity_set_name"), - "logical": table_info.get("entity_logical_name"), + "logical": table_info.get("table_logical_name"), "metadata_id": table_info.get("metadata_id"), }) @@ -85,21 +85,21 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 # Create it since it doesn't exist try: table_info = client.create_table( - "SampleItem", + "new_SampleItem", { - "code": "string", - "count": "int", - "amount": "decimal", - "when": "datetime", - "active": "bool", + "new_Code": "string", + "new_Count": "int", + "new_Amount": "decimal", + "new_When": "datetime", + "new_Active": "bool", }, ) created_this_run = True if table_info and table_info.get("columns_created") else False print({ - "table": table_info.get("entity_schema") if table_info else None, + "table": table_info.get("table_schema_name") if table_info else None, "existed": False, "entity_set": table_info.get("entity_set_name") if table_info else None, - "logical": table_info.get("entity_logical_name") if table_info else None, + "logical": table_info.get("table_logical_name") if table_info else None, "metadata_id": table_info.get("metadata_id") if table_info else None, }) except Exception as e: @@ -119,7 +119,7 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 # Fail fast: all operations must use the custom table sys.exit(1) -logical = table_info.get("entity_logical_name") +logical = table_info.get("table_logical_name") # Derive attribute logical name prefix 
from the entity logical name attr_prefix = logical.split("_", 1)[0] if "_" in logical else logical record_data = { @@ -232,9 +232,9 @@ def _retry_if(ex: Exception) -> bool: print("Cleanup (Metadata):") try: # Delete if present, regardless of whether it was created in this run - info = client.get_table_info("SampleItem") + info = client.get_table_info("new_SampleItem") if info: - client.delete_table("SampleItem") + client.delete_table("new_SampleItem") print({"table_deleted": True}) else: print({"table_deleted": False, "reason": "not found"}) diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py new file mode 100644 index 0000000..5311592 --- /dev/null +++ b/examples/advanced/walkthrough.py @@ -0,0 +1,322 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. + +""" +Walkthrough demonstrating core Dataverse SDK operations. + +This example shows: +- Table creation with various column types including enums +- Single and multiple record CRUD operations +- Querying with filtering, paging, and SQL +- Picklist label-to-value conversion +- Column management +- Cleanup + +Prerequisites: +- pip install PowerPlatform-Dataverse-Client +- pip install azure-identity +""" + +import sys +import json +from enum import IntEnum +from azure.identity import InteractiveBrowserCredential +from PowerPlatform.Dataverse.client import DataverseClient + + +# Simple logging helper +def log_call(description): + print(f"\n→ {description}") + + +# Define enum for priority picklist +class Priority(IntEnum): + LOW = 1 + MEDIUM = 2 + HIGH = 3 + + +def main(): + print("=" * 80) + print("Dataverse SDK Walkthrough") + print("=" * 80) + + # ============================================================================ + # 1. SETUP & AUTHENTICATION + # ============================================================================ + print("\n" + "=" * 80) + print("1. Setup & Authentication") + print("=" * 80) + + base_url = input("Enter Dataverse org URL (e.g. 
https://yourorg.crm.dynamics.com): ").strip() + if not base_url: + print("No URL entered; exiting.") + sys.exit(1) + + base_url = base_url.rstrip('/') + + log_call("InteractiveBrowserCredential()") + credential = InteractiveBrowserCredential() + + log_call(f"DataverseClient(base_url='{base_url}', credential=...)") + client = DataverseClient(base_url=base_url, credential=credential) + print(f"✓ Connected to: {base_url}") + + # ============================================================================ + # 2. TABLE CREATION (METADATA) + # ============================================================================ + print("\n" + "=" * 80) + print("2. Table Creation (Metadata)") + print("=" * 80) + + table_name = "new_WalkthroughDemo" + + log_call(f"client.get_table_info('{table_name}')") + table_info = client.get_table_info(table_name) + + if table_info: + print(f"✓ Table already exists: {table_info.get('table_schema_name')}") + print(f" Logical Name: {table_info.get('table_logical_name')}") + print(f" Entity Set: {table_info.get('entity_set_name')}") + else: + log_call(f"client.create_table('{table_name}', schema={{...}})") + columns = { + "new_Title": "string", + "new_Quantity": "int", + "new_Amount": "decimal", + "new_Completed": "bool", + "new_Priority": Priority + } + table_info = client.create_table(table_name, columns) + print(f"✓ Created table: {table_info.get('table_schema_name')}") + print(f" Columns created: {', '.join(table_info.get('columns_created', []))}") + + # ============================================================================ + # 3. CREATE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("3. 
Create Operations") + print("=" * 80) + + # Single create + log_call(f"client.create('{table_name}', {{...}})") + single_record = { + "new_Title": "Complete project documentation", + "new_Quantity": 5, + "new_Amount": 1250.50, + "new_Completed": False, + "new_Priority": Priority.MEDIUM + } + id1 = client.create(table_name, single_record)[0] + print(f"✓ Created single record: {id1}") + + # Multiple create + log_call(f"client.create('{table_name}', [{{...}}, {{...}}, {{...}}])") + multiple_records = [ + { + "new_Title": "Review code changes", + "new_Quantity": 10, + "new_Amount": 500.00, + "new_Completed": True, + "new_Priority": Priority.HIGH + }, + { + "new_Title": "Update test cases", + "new_Quantity": 8, + "new_Amount": 750.25, + "new_Completed": False, + "new_Priority": Priority.LOW + }, + { + "new_Title": "Deploy to staging", + "new_Quantity": 3, + "new_Amount": 2000.00, + "new_Completed": False, + "new_Priority": Priority.HIGH + } + ] + ids = client.create(table_name, multiple_records) + print(f"✓ Created {len(ids)} records: {ids}") + + # ============================================================================ + # 4. READ OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("4. 
Read Operations") + print("=" * 80) + + # Single read by ID + log_call(f"client.get('{table_name}', '{id1}')") + record = client.get(table_name, id1) + print("✓ Retrieved single record:") + print(json.dumps({ + "new_walkthroughdemoid": record.get("new_walkthroughdemoid"), + "new_title": record.get("new_title"), + "new_quantity": record.get("new_quantity"), + "new_amount": record.get("new_amount"), + "new_completed": record.get("new_completed"), + "new_priority": record.get("new_priority"), + "new_priority@FormattedValue": record.get("new_priority@OData.Community.Display.V1.FormattedValue") + }, indent=2)) + + # Multiple read with filter + log_call(f"client.get('{table_name}', filter='new_quantity gt 5')") + all_records = [] + for page in client.get(table_name, filter="new_quantity gt 5"): + all_records.extend(page) + print(f"✓ Found {len(all_records)} records with new_quantity > 5") + for rec in all_records: + print(f" - new_Title='{rec.get('new_title')}', new_Quantity={rec.get('new_quantity')}") + + # ============================================================================ + # 5. UPDATE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("5. Update Operations") + print("=" * 80) + + # Single update + log_call(f"client.update('{table_name}', '{id1}', {{...}})") + client.update(table_name, id1, {"new_Quantity": 100}) + updated = client.get(table_name, id1) + print(f"✓ Updated single record new_Quantity: {updated.get('new_quantity')}") + + # Multiple update (broadcast same change) + log_call(f"client.update('{table_name}', [{len(ids)} IDs], {{...}})") + client.update(table_name, ids, {"new_Completed": True}) + print(f"✓ Updated {len(ids)} records to new_Completed=True") + + # ============================================================================ + # 6. PAGING DEMO + # ============================================================================ + print("\n" + "=" * 80) + print("6. 
Paging Demo") + print("=" * 80) + + # Create 20 records for paging + log_call(f"client.create('{table_name}', [20 records])") + paging_records = [ + { + "new_Title": f"Paging test item {i}", + "new_Quantity": i, + "new_Amount": i * 10.0, + "new_Completed": False, + "new_Priority": Priority.LOW + } + for i in range(1, 21) + ] + paging_ids = client.create(table_name, paging_records) + print(f"✓ Created {len(paging_ids)} records for paging demo") + + # Query with paging + log_call(f"client.get('{table_name}', page_size=5)") + print("Fetching records with page_size=5...") + for page_num, page in enumerate(client.get(table_name, orderby=["new_Quantity"], page_size=5), start=1): + record_ids = [r.get('new_walkthroughdemoid')[:8] + "..." for r in page] + print(f" Page {page_num}: {len(page)} records - IDs: {record_ids}") + + # ============================================================================ + # 7. SQL QUERY + # ============================================================================ + print("\n" + "=" * 80) + print("7. SQL Query") + print("=" * 80) + + log_call(f"client.query_sql('SELECT new_title, new_quantity FROM {table_name} WHERE new_completed = 1')") + sql = f"SELECT new_title, new_quantity FROM new_walkthroughdemo WHERE new_completed = 1" + try: + results = client.query_sql(sql) + print(f"✓ SQL query returned {len(results)} completed records:") + for result in results[:5]: # Show first 5 + print(f" - new_Title='{result.get('new_title')}', new_Quantity={result.get('new_quantity')}") + except Exception as e: + print(f"⚠ SQL query failed (known server-side bug): {str(e)}") + + # ============================================================================ + # 8. PICKLIST LABEL CONVERSION + # ============================================================================ + print("\n" + "=" * 80) + print("8. 
Picklist Label Conversion") + print("=" * 80) + + log_call(f"client.create('{table_name}', {{'new_Priority': 'High'}})") + label_record = { + "new_Title": "Test label conversion", + "new_Quantity": 1, + "new_Amount": 99.99, + "new_Completed": False, + "new_Priority": "High" # String label instead of int + } + label_id = client.create(table_name, label_record)[0] + retrieved = client.get(table_name, label_id) + print(f"✓ Created record with string label 'High' for new_Priority") + print(f" new_Priority stored as integer: {retrieved.get('new_priority')}") + print(f" new_Priority@FormattedValue: {retrieved.get('new_priority@OData.Community.Display.V1.FormattedValue')}") + + # ============================================================================ + # 9. COLUMN MANAGEMENT + # ============================================================================ + print("\n" + "=" * 80) + print("9. Column Management") + print("=" * 80) + + log_call(f"client.create_columns('{table_name}', {{'new_Notes': 'string'}})") + created_cols = client.create_columns(table_name, {"new_Notes": "string"}) + print(f"✓ Added column: {created_cols[0]}") + + # Delete the column we just added + log_call(f"client.delete_columns('{table_name}', ['new_Notes'])") + client.delete_columns(table_name, ["new_Notes"]) + print(f"✓ Deleted column: new_Notes") + + # ============================================================================ + # 10. DELETE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("10. 
Delete Operations") + print("=" * 80) + + # Single delete + log_call(f"client.delete('{table_name}', '{id1}')") + client.delete(table_name, id1) + print(f"✓ Deleted single record: {id1}") + + # Multiple delete (delete the paging demo records) + log_call(f"client.delete('{table_name}', [{len(paging_ids)} IDs])") + job_id = client.delete(table_name, paging_ids) + print(f"✓ Bulk delete job started: {job_id}") + print(f" (Deleting {len(paging_ids)} paging demo records)") + + # ============================================================================ + # 11. CLEANUP + # ============================================================================ + print("\n" + "=" * 80) + print("11. Cleanup") + print("=" * 80) + + log_call(f"client.delete_table('{table_name}')") + client.delete_table(table_name) + print(f"✓ Deleted table: {table_name}") + + # ============================================================================ + # SUMMARY + # ============================================================================ + print("\n" + "=" * 80) + print("Walkthrough Complete!") + print("=" * 80) + print("\nDemonstrated operations:") + print(" ✓ Table creation with multiple column types") + print(" ✓ Single and multiple record creation") + print(" ✓ Reading records by ID and with filters") + print(" ✓ Single and multiple record updates") + print(" ✓ Paging through large result sets") + print(" ✓ SQL queries") + print(" ✓ Picklist label-to-value conversion") + print(" ✓ Column management") + print(" ✓ Single and bulk delete operations") + print(" ✓ Table cleanup") + print("=" * 80) + + +if __name__ == "__main__": + main() diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index dd5fdf5..ea4b727 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -78,35 +78,36 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]: print("\n📋 Test Table Setup") print("=" * 50) - table_schema = 
"TestSDKFunctionality" + table_schema_name = "test_TestSDKFunctionality" try: # Check if table already exists - existing_table = client.get_table_info(table_schema) + existing_table = client.get_table_info(table_schema_name) if existing_table: - print(f"✅ Test table '{table_schema}' already exists") + print(f"✅ Test table '{table_schema_name}' already exists") return existing_table except Exception: - print(f"📝 Table '{table_schema}' not found, creating...") + print(f"📝 Table '{table_schema_name}' not found, creating...") try: print("🔨 Creating new test table...") # Create the test table with various field types table_info = client.create_table( - table_schema, + table_schema_name, + primary_column_schema_name="test_name", + columns= { - "name": "string", # Primary name field - "description": "string", # Description field - "count": "int", # Integer field - "amount": "decimal", # Decimal field - "is_active": "bool", # Boolean field - "created_date": "datetime" # DateTime field + "test_description": "string", # Description field + "test_count": "int", # Integer field + "test_amount": "decimal", # Decimal field + "test_is_active": "bool", # Boolean field + "test_created_date": "datetime" # DateTime field } ) - print(f"✅ Created test table: {table_info.get('entity_schema')}") - print(f" Logical name: {table_info.get('entity_logical_name')}") + print(f"✅ Created test table: {table_info.get('table_schema_name')}") + print(f" Logical name: {table_info.get('table_logical_name')}") print(f" Entity set: {table_info.get('entity_set_name')}") # Wait a moment for table to be ready @@ -122,9 +123,9 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s """Test record creation.""" print("\n📝 Record Creation Test") print("=" * 50) - - logical_name = table_info.get("entity_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + + table_schema_name = table_info.get("table_schema_name") + attr_prefix = 
table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name # Create test record data test_data = { @@ -138,7 +139,7 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s try: print("🚀 Creating test record...") - created_ids = client.create(logical_name, test_data) + created_ids = client.create(table_schema_name, test_data) if isinstance(created_ids, list) and created_ids: record_id = created_ids[0] @@ -162,12 +163,12 @@ def test_read_record(client: DataverseClient, table_info: Dict[str, Any], record print("\n📖 Record Reading Test") print("=" * 50) - logical_name = table_info.get("entity_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + table_schema_name = table_info.get("table_schema_name") + attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name try: print(f"🔍 Reading record: {record_id}") - record = client.get(logical_name, record_id) + record = client.get(table_schema_name, record_id) if record: print("✅ Record retrieved successfully!") @@ -197,15 +198,15 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N print("\n🔍 Record Query Test") print("=" * 50) - logical_name = table_info.get("entity_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + table_schema_name = table_info.get("table_schema_name") + attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name try: print("🔍 Querying records from test table...") # Query with filter and select records_iterator = client.get( - logical_name, + table_schema_name, select=[f"{attr_prefix}_name", f"{attr_prefix}_count", f"{attr_prefix}_amount"], filter=f"{attr_prefix}_is_active eq true", top=5, @@ -233,14 +234,14 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor print("\n🧹 Cleanup") print("=" * 50) - logical_name 
= table_info.get("entity_logical_name") + table_schema_name = table_info.get("table_schema_name") # Ask user if they want to clean up cleanup_choice = input("Do you want to delete the test record? (y/N): ").strip().lower() if cleanup_choice in ['y', 'yes']: try: - client.delete(logical_name, record_id) + client.delete(table_schema_name, record_id) print("✅ Test record deleted successfully") except Exception as e: print(f"⚠️ Failed to delete test record: {e}") @@ -252,7 +253,7 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor if table_cleanup in ['y', 'yes']: try: - client.delete_table(table_info.get("entity_schema")) + client.delete_table(table_info.get("table_schema_name")) print("✅ Test table deleted successfully") except Exception as e: print(f"⚠️ Failed to delete test table: {e}") diff --git a/examples/basic/installation_example.py b/examples/basic/installation_example.py index ef47e79..1974a64 100644 --- a/examples/basic/installation_example.py +++ b/examples/basic/installation_example.py @@ -226,7 +226,7 @@ def show_usage_examples(): # Get table information info = client.get_table_info("CustomEntity") -print(f"Table: {info['entity_schema']}") +print(f"Table: {info['table_schema_name']}") # List all tables tables = client.list_tables() diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index 9307077..ef06d24 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -100,14 +100,14 @@ def _get_odata(self) -> ODataClient: return self._odata # ---------------- Unified CRUD: create/update/delete ---------------- - def create(self, logical_name: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: + def create(self, table_schema_name: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: """ - Create one or more records by logical (singular) entity name. + Create one or more records by table name. 
- :param logical_name: Logical (singular) entity name, e.g. ``"account"`` or ``"contact"``. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"account"``, ``"contact"``, or ``"new_MyTestTable"``). + :type table_schema_name: ``str`` :param records: A single record dictionary or a list of record dictionaries. - Each dictionary should contain attribute logical names as keys. + Each dictionary should contain column schema names as keys. :type records: ``dict`` or ``list[dict]`` :return: List of created record GUIDs. Returns a single-element list for a single input. @@ -133,21 +133,21 @@ def create(self, logical_name: str, records: Union[Dict[str, Any], List[Dict[str print(f"Created {len(ids)} accounts") """ od = self._get_odata() - entity_set = od._entity_set_from_logical(logical_name) + entity_set = od._entity_set_from_schema_name(table_schema_name) if isinstance(records, dict): - rid = od._create(entity_set, logical_name, records) + rid = od._create(entity_set, table_schema_name, records) # _create returns str on single input if not isinstance(rid, str): raise TypeError("_create (single) did not return GUID string") return [rid] if isinstance(records, list): - ids = od._create_multiple(entity_set, logical_name, records) + ids = od._create_multiple(entity_set, table_schema_name, records) if not isinstance(ids, list) or not all(isinstance(x, str) for x in ids): raise TypeError("_create (multi) did not return list[str]") return ids raise TypeError("records must be dict or list[dict]") - def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def update(self, table_schema_name: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """ Update one or more records. @@ -157,8 +157,8 @@ def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[D 2. 
Broadcast update: ``update("account", [id1, id2], {"status": 1})`` - applies same changes to all IDs 3. Paired updates: ``update("account", [id1, id2], [changes1, changes2])`` - one-to-one mapping - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). + :type table_schema_name: ``str`` :param ids: Single GUID string or list of GUID strings to update. :type ids: ``str`` or ``list[str]`` :param changes: Dictionary of changes for single/broadcast mode, or list of dictionaries @@ -194,24 +194,24 @@ def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[D if isinstance(ids, str): if not isinstance(changes, dict): raise TypeError("For single id, changes must be a dict") - od._update(logical_name, ids, changes) # discard representation + od._update(table_schema_name, ids, changes) # discard representation return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") - od._update_by_ids(logical_name, ids, changes) + od._update_by_ids(table_schema_name, ids, changes) return None def delete( self, - logical_name: str, + table_schema_name: str, ids: Union[str, List[str]], use_bulk_delete: bool = True, ) -> Optional[str]: """ Delete one or more records by GUID. - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). + :type table_schema_name: ``str`` :param ids: Single GUID string or list of GUID strings to delete. 
:type ids: ``str`` or ``list[str]`` :param use_bulk_delete: When ``True`` (default) and ``ids`` is a list, execute the BulkDelete action and @@ -235,7 +235,7 @@ def delete( """ od = self._get_odata() if isinstance(ids, str): - od._delete(logical_name, ids) + od._delete(table_schema_name, ids) return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") @@ -244,14 +244,14 @@ def delete( if not all(isinstance(rid, str) for rid in ids): raise TypeError("ids must contain string GUIDs") if use_bulk_delete: - return od._delete_multiple(logical_name, ids) + return od._delete_multiple(table_schema_name, ids) for rid in ids: - od._delete(logical_name, rid) + od._delete(table_schema_name, rid) return None def get( self, - logical_name: str, + table_schema_name: str, record_id: Optional[str] = None, select: Optional[List[str]] = None, filter: Optional[str] = None, @@ -266,19 +266,19 @@ def get( When ``record_id`` is provided, returns a single record dictionary. When ``record_id`` is None, returns a generator yielding batches of records. - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). + :type table_schema_name: ``str`` :param record_id: Optional GUID to fetch a specific record. If None, queries multiple records. :type record_id: ``str`` or ``None`` - :param select: Optional list of attribute logical names to retrieve. + :param select: Optional list of attribute logical names to retrieve. Column names are case-insensitive and automatically lowercased (e.g. ``["new_Title", "new_Amount"]`` becomes ``"new_title,new_amount"``). :type select: ``list[str]`` or ``None`` - :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"``. + :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"`` or ``"new_quantity gt 5"``. 
Column names in filter expressions must use exact lowercase logical names (e.g. ``"new_quantity"``, not ``"new_Quantity"``). The filter string is passed directly to the Dataverse Web API without transformation. :type filter: ``str`` or ``None`` - :param orderby: Optional list of attributes to sort by, e.g. ``["name asc", "createdon desc"]``. + :param orderby: Optional list of attributes to sort by, e.g. ``["name asc", "createdon desc"]``. Column names are automatically lowercased. :type orderby: ``list[str]`` or ``None`` :param top: Optional maximum number of records to return. :type top: ``int`` or ``None`` - :param expand: Optional list of navigation properties to expand. + :param expand: Optional list of navigation properties to expand, e.g. ``["primarycontactid"]``. Navigation property names are case-sensitive and must match the server-defined names exactly. These are NOT automatically transformed. Consult entity metadata for correct casing. :type expand: ``list[str]`` or ``None`` :param page_size: Optional number of records per page for pagination. :type page_size: ``int`` or ``None`` @@ -295,12 +295,27 @@ def get( record = client.get("account", record_id=account_id, select=["name", "telephone1"]) print(record["name"]) - Query multiple records with filtering:: + Query multiple records with filtering (note: exact logical names in filter):: - for batch in client.get("account", filter="name eq 'Contoso'", select=["name"]): + for batch in client.get( + "account", + filter="statecode eq 0 and name eq 'Contoso'", # Must use exact logical names (lower-case) + select=["name", "telephone1"] + ): for account in batch: print(account["name"]) + Query with navigation property expansion (note: case-sensitive property name):: + + for batch in client.get( + "account", + select=["name"], + expand=["primarycontactid"], # Case-sensitive! 
Check metadata for exact name + filter="statecode eq 0" + ): + for account in batch: + print(f"{account['name']} - Contact: {account.get('primarycontactid', {}).get('fullname')}") + Query with sorting and pagination:: for batch in client.get( @@ -316,12 +331,12 @@ def get( if not isinstance(record_id, str): raise TypeError("record_id must be str") return od._get( - logical_name, + table_schema_name, record_id, select=select, ) return od._get_multiple( - logical_name, + table_schema_name, select=select, filter=filter, orderby=orderby, @@ -367,43 +382,41 @@ def query_sql(self, sql: str): return self._get_odata()._query_sql(sql) # Table metadata helpers - def get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: + def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """ - Get basic metadata for a custom table if it exists. + Get basic metadata for a table if it exists. - :param tablename: Table friendly name (e.g. ``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). - :type tablename: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"`` or ``"account"``). + :type table_schema_name: ``str`` - :return: Dictionary containing table metadata with keys ``entity_schema``, - ``entity_logical_name``, ``entity_set_name``, and ``metadata_id``. + :return: Dictionary containing table metadata with keys ``table_schema_name``, + ``table_logical_name``, ``entity_set_name``, and ``metadata_id``. Returns None if the table is not found. 
:rtype: ``dict`` or ``None`` Example: Retrieve table metadata:: - info = client.get_table_info("SampleItem") + info = client.get_table_info("new_MyTestTable") if info: - print(f"Logical name: {info['entity_logical_name']}") + print(f"Logical name: {info['table_logical_name']}") print(f"Entity set: {info['entity_set_name']}") """ - return self._get_odata()._get_table_info(tablename) + return self._get_odata()._get_table_info(table_schema_name) def create_table( self, - tablename: str, - schema: Dict[str, Any], + table_schema_name: str, + columns: Dict[str, Any], solution_unique_name: Optional[str] = None, + primary_column_schema_name: Optional[str] = None, ) -> Dict[str, Any]: """ Create a simple custom table with specified columns. - :param tablename: Table friendly name (e.g. ``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). If a publisher prefix is not included, the default - publisher prefix will be applied. - :type tablename: ``str`` - :param schema: Dictionary mapping column logical names (without prefix) to their types. + :param table_schema_name: Schema name of the table with customization prefix value (e.g. ``"new_MyTestTable"``). + :type table_schema_name: ``str`` + :param columns: Dictionary mapping column names (with customization prefix value) to their types. All custom column names must include the customization prefix value (e.g. ``"new_Title"``). Supported types: - Primitive types: ``"string"``, ``"int"``, ``"decimal"``, ``"float"``, ``"datetime"``, ``"bool"`` @@ -418,13 +431,14 @@ class ItemStatus(IntEnum): 1036: {"Active": "Actif", "Inactive": "Inactif"} } - :type schema: ``dict[str, Any]`` - :param solution_unique_name: Optional solution unique name that should own the new table. - When omitted the table is created in the default solution. + :type columns: ``dict[str, Any]`` + :param solution_unique_name: Optional solution unique name that should own the new table. When omitted the table is created in the default solution. 
:type solution_unique_name: ``str`` or ``None`` + :param primary_column_schema_name: Optional primary name column schema name with customization prefix value (e.g. ``"new_ProductName"``). If not provided, defaults to ``"{customization prefix value}_Name"``. + :type primary_column_schema_name: ``str`` or ``None`` - :return: Dictionary containing table metadata including ``entity_schema``, - ``entity_set_name``, ``entity_logical_name``, ``metadata_id``, and ``columns_created``. + :return: Dictionary containing table metadata including ``table_schema_name``, + ``entity_set_name``, ``table_logical_name``, ``metadata_id``, and ``columns_created``. :rtype: ``dict`` :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If table creation fails or the schema is invalid. @@ -438,31 +452,39 @@ class ItemStatus(IntEnum): ACTIVE = 1 INACTIVE = 2 - schema = { - "title": "string", - "quantity": "int", - "price": "decimal", - "available": "bool", - "status": ItemStatus + columns = { + "new_Title": "string", # Note: includes 'new_' customization prefix value + "new_Quantity": "int", + "new_Price": "decimal", + "new_Available": "bool", + "new_Status": ItemStatus } - result = client.create_table("SampleItem", schema) - print(f"Created table: {result['entity_logical_name']}") + result = client.create_table("new_MyTestTable", columns) + print(f"Created table: {result['table_schema_name']}") print(f"Columns: {result['columns_created']}") + + Create a table with a custom primary column name:: + + result = client.create_table( + "new_Product", + {"new_Price": "decimal"}, + primary_column_schema_name="new_ProductName" + ) """ return self._get_odata()._create_table( - tablename, - schema, + table_schema_name, + columns, solution_unique_name, + primary_column_schema_name, ) - def delete_table(self, tablename: str) -> None: + def delete_table(self, table_schema_name: str) -> None: """ Delete a custom table by name. - :param tablename: Table friendly name (e.g. 
``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). - :type tablename: ``str`` + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"`` or ``"account"``). + :type table_schema_name: ``str`` :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If the table does not exist or deletion fails. @@ -473,9 +495,9 @@ def delete_table(self, tablename: str) -> None: Example: Delete a custom table:: - client.delete_table("SampleItem") + client.delete_table("new_MyTestTable") """ - self._get_odata()._delete_table(tablename) + self._get_odata()._delete_table(table_schema_name) def list_tables(self) -> list[str]: """ @@ -495,15 +517,15 @@ def list_tables(self) -> list[str]: def create_columns( self, - tablename: str, + table_schema_name: str, columns: Dict[str, Any], ) -> List[str]: """ Create one or more columns on an existing table using a schema-style mapping. - :param tablename: Friendly name ("SampleItem") or full schema name ("new_SampleItem"). - :type tablename: ``str`` - :param columns: Mapping of logical names (without prefix) to supported types. Primitive types include + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"``). + :type table_schema_name: ``str`` + :param columns: Mapping of column schema names (with customization prefix value) to supported types. All custom column names must include the customization prefix value (e.g. ``"new_Notes"``). Primitive types include ``string``, ``int``, ``decimal``, ``float``, ``datetime``, and ``bool``. Enum subclasses (IntEnum preferred) generate a local option set and can specify localized labels via ``__labels__``. 
:type columns: ``Dict[str, Any]`` @@ -513,31 +535,30 @@ def create_columns( Create two columns on the custom table:: created = client.create_columns( - "new_SampleItem", + "new_MyTestTable", { - "scratch": "string", - "flags": "bool", + "new_Scratch": "string", + "new_Flags": "bool", }, ) - print(created) + print(created) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._create_columns( - tablename, + table_schema_name, columns, ) def delete_columns( self, - tablename: str, + table_schema_name: str, columns: Union[str, List[str]], ) -> List[str]: """ Delete one or more columns from a table. - :param tablename: Friendly or schema name of the table. - :type tablename: ``str`` - :param columns: Column name or list of column names to remove. Friendly names are normalized to schema - names using the same prefix logic as ``create_columns``. + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"``). + :type table_schema_name: ``str`` + :param columns: Column name or list of column names to remove. Must include customization prefix value (e.g. ``"new_TestColumn"``). :type columns: ``str`` | ``list[str]`` :returns: Schema names for the columns that were removed. :rtype: ``list[str]`` @@ -545,20 +566,20 @@ def delete_columns( Remove two custom columns by schema name: removed = client.delete_columns( - "new_SampleItem", + "new_MyTestTable", ["new_Scratch", "new_Flags"], ) - print(removed) + print(removed) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._delete_columns( - tablename, + table_schema_name, columns, ) # File upload def upload_file( self, - logical_name: str, + table_schema_name: str, record_id: str, file_name_attribute: str, path: str, @@ -569,8 +590,8 @@ def upload_file( """ Upload a file to a Dataverse file column. - :param logical_name: Singular logical table name, e.g. ``"account"``. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table, e.g. ``"account"`` or ``"new_MyTestTable"``. 
+ :type table_schema_name: ``str`` :param record_id: GUID of the target record. :type record_id: ``str`` :param file_name_attribute: Logical name of the file column attribute. @@ -600,7 +621,7 @@ def upload_file( Upload a PDF file:: client.upload_file( - logical_name="account", + table_schema_name="account", record_id=account_id, file_name_attribute="new_contract", path="/path/to/contract.pdf", @@ -610,7 +631,7 @@ def upload_file( Upload with auto mode selection:: client.upload_file( - logical_name="email", + table_schema_name="email", record_id=email_id, file_name_attribute="new_attachment", path="/path/to/large_file.zip", @@ -618,7 +639,7 @@ def upload_file( ) """ od = self._get_odata() - entity_set = od._entity_set_from_logical(logical_name) + entity_set = od._entity_set_from_schema_name(table_schema_name) od.upload_file( entity_set, record_id, diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index cfeb721..385497e 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -45,6 +45,32 @@ def _escape_odata_quotes(value: str) -> str: """Escape single quotes for OData queries (by doubling them).""" return value.replace("'", "''") + @staticmethod + def _normalize_cache_key(table_schema_name: str) -> str: + """Normalize table_schema_name to lowercase for case-insensitive cache keys.""" + return table_schema_name.lower() if isinstance(table_schema_name, str) else "" + + @staticmethod + def _lowercase_keys(record: Dict[str, Any]) -> Dict[str, Any]: + """Convert all dictionary keys to lowercase for case-insensitive column names. + + Dataverse LogicalNames for attributes are stored lowercase, but users may + provide PascalCase names (matching SchemaName). This normalizes the input. 
+ """ + if not isinstance(record, dict): + return record + return {k.lower() if isinstance(k, str) else k: v for k, v in record.items()} + + @staticmethod + def _lowercase_list(items: Optional[List[str]]) -> Optional[List[str]]: + """Convert all strings in a list to lowercase for case-insensitive column names. + + Used for $select, $orderby, $expand parameters where column names must be lowercase. + """ + if not items: + return items + return [item.lower() if isinstance(item, str) else item for item in items] + def __init__( self, auth, @@ -74,11 +100,11 @@ def __init__( backoff=self.config.http_backoff, timeout=self.config.http_timeout, ) - # Cache: logical name -> entity set name (plural) resolved from metadata + # Cache: normalized table_schema_name (lowercase) -> entity set name (plural) resolved from metadata self._logical_to_entityset_cache: dict[str, str] = {} - # Cache: logical name -> primary id attribute (e.g. accountid) + # Cache: normalized table_schema_name (lowercase) -> primary id attribute (e.g. accountid) self._logical_primaryid_cache: dict[str, str] = {} - # Picklist label cache: (logical_name, attribute_logical) -> {'map': {...}, 'ts': epoch_seconds} + # Picklist label cache: (normalized_table_schema_name, normalized_attribute) -> {'map': {...}, 'ts': epoch_seconds} self._picklist_label_cache = {} self._picklist_cache_ttl_seconds = 3600 # 1 hour TTL @@ -158,13 +184,13 @@ def _request(self, method: str, url: str, *, expected: tuple[int, ...] = (200, 2 ) # --- CRUD Internal functions --- - def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> str: + def _create(self, entity_set: str, table_schema_name: str, record: Dict[str, Any]) -> str: """Create a single record and return its GUID. :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param logical_name: Singular logical entity name. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table. 
+ :type table_schema_name: ``str`` :param record: Attribute payload mapped by logical column names. :type record: ``dict[str, Any]`` @@ -174,7 +200,9 @@ def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> .. note:: Relies on ``OData-EntityId`` (canonical) or ``Location`` response header. No response body parsing is performed. Raises ``RuntimeError`` if neither header contains a GUID. """ - record = self._convert_labels_to_ints(logical_name, record) + # Lowercase all keys to match Dataverse LogicalName expectations + record = self._lowercase_keys(record) + record = self._convert_labels_to_ints(table_schema_name, record) url = f"{self.api}/{entity_set}" r = self._request("post", url, json=record) @@ -193,28 +221,32 @@ def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> f"Create response missing GUID in OData-EntityId/Location headers (status={getattr(r,'status_code', '?')}). Headers: {header_keys}" ) - def _create_multiple(self, entity_set: str, logical_name: str, records: List[Dict[str, Any]]) -> List[str]: + def _create_multiple(self, entity_set: str, table_schema_name: str, records: List[Dict[str, Any]]) -> List[str]: """Create multiple records using the collection-bound ``CreateMultiple`` action. :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param logical_name: Singular logical entity name. - :type logical_name: ``str`` - :param records: Payload dictionaries mapped by logical attribute names. + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param records: Payload dictionaries mapped by column schema names. :type records: ``list[dict[str, Any]]`` :return: List of created record GUIDs (may be empty if response lacks IDs). :rtype: ``list[str]`` .. note:: - Logical type stamping: if any payload omits ``@odata.type`` the client injects ``Microsoft.Dynamics.CRM.``. 
If all payloads already include ``@odata.type`` no modification occurs. + Logical type stamping: if any payload omits ``@odata.type`` the client injects ``Microsoft.Dynamics.CRM.``. If all payloads already include ``@odata.type`` no modification occurs. """ if not all(isinstance(r, dict) for r in records): raise TypeError("All items for multi-create must be dicts") need_logical = any("@odata.type" not in r for r in records) + # @odata.type uses LogicalName (lowercase) + logical_name = table_schema_name.lower() enriched: List[Dict[str, Any]] = [] for r in records: - r = self._convert_labels_to_ints(logical_name, r) + # Lowercase all keys to match Dataverse LogicalName expectations + r = self._lowercase_keys(r) + r = self._convert_labels_to_ints(table_schema_name, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -252,25 +284,26 @@ def _create_multiple(self, entity_set: str, logical_name: str, records: List[Dic return [] # --- Derived helpers for high-level client ergonomics --- - def _primary_id_attr(self, logical_name: str) -> str: + def _primary_id_attr(self, table_schema_name: str) -> str: """Return primary key attribute using metadata; error if unavailable.""" - pid = self._logical_primaryid_cache.get(logical_name) + cache_key = self._normalize_cache_key(table_schema_name) + pid = self._logical_primaryid_cache.get(cache_key) if pid: return pid - # Resolve metadata (populates _logical_primaryid_cache or raises if logical unknown) - self._entity_set_from_logical(logical_name) - pid2 = self._logical_primaryid_cache.get(logical_name) + # Resolve metadata (populates _logical_primaryid_cache or raises if table_schema_name unknown) + self._entity_set_from_schema_name(table_schema_name) + pid2 = self._logical_primaryid_cache.get(cache_key) if pid2: return pid2 raise RuntimeError( - f"PrimaryIdAttribute not resolved for logical name '{logical_name}'. Metadata did not include PrimaryIdAttribute." 
+ f"PrimaryIdAttribute not resolved for table_schema_name '{table_schema_name}'. Metadata did not include PrimaryIdAttribute." ) - def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def _update_by_ids(self, table_schema_name: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """Update many records by GUID list using the collection-bound ``UpdateMultiple`` action. - :param logical_name: Logical (singular) entity name. - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` :param ids: GUIDs of target records. :type ids: ``list[str]`` :param changes: Broadcast patch (``dict``) applied to all IDs, or list of per-record patches (1:1 with ``ids``). @@ -283,11 +316,11 @@ def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[ raise TypeError("ids must be list[str]") if not ids: return None - pk_attr = self._primary_id_attr(logical_name) - entity_set = self._entity_set_from_logical(logical_name) + pk_attr = self._primary_id_attr(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) if isinstance(changes, dict): batch = [{pk_attr: rid, **changes} for rid in ids] - self._update_multiple(entity_set, logical_name, batch) + self._update_multiple(entity_set, table_schema_name, batch) return None if not isinstance(changes, list): raise TypeError("changes must be dict or list[dict]") @@ -298,12 +331,12 @@ def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[ if not isinstance(patch, dict): raise TypeError("Each patch must be a dict") batch.append({pk_attr: rid, **patch}) - self._update_multiple(entity_set, logical_name, batch) + self._update_multiple(entity_set, table_schema_name, batch) return None def _delete_multiple( self, - logical_name: str, + table_schema_name: str, ids: List[str], ) -> Optional[str]: """Delete many 
records by GUID list via the ``BulkDelete`` action. @@ -321,9 +354,12 @@ def _delete_multiple( return None value_objects = [{"Value": rid, "Type": "System.Guid"} for rid in targets] - pk_attr = self._primary_id_attr(logical_name) + pk_attr = self._primary_id_attr(table_schema_name) timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds").replace("+00:00", "Z") - job_label = f"Bulk delete {logical_name} records @ {timestamp}" + job_label = f"Bulk delete {table_schema_name} records @ {timestamp}" + + # EntityName must use lowercase LogicalName + logical_name = table_schema_name.lower() query = { "@odata.type": "Microsoft.Dynamics.CRM.QueryExpression", @@ -385,30 +421,32 @@ def esc(match): return f"({k})" return f"({k})" - def _update(self, logical_name: str, key: str, data: Dict[str, Any]) -> None: - """Update an existing record by GUID or alternate key. + def _update(self, table_schema_name: str, key: str, data: Dict[str, Any]) -> None: + """Update an existing record by GUID. - :param logical_name: Logical (singular) entity name. - :type logical_name: ``str`` - :param key: Record GUID (with or without parentheses) or alternate key syntax. + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param key: Record GUID (with or without parentheses). :type key: ``str`` :param data: Partial entity payload (attributes to patch). 
:type data: ``dict[str, Any]`` :return: ``None`` :rtype: ``None`` """ - data = self._convert_labels_to_ints(logical_name, data) - entity_set = self._entity_set_from_logical(logical_name) + # Lowercase all keys to match Dataverse LogicalName expectations + data = self._lowercase_keys(data) + data = self._convert_labels_to_ints(table_schema_name, data) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("patch", url, headers={"If-Match": "*"}, json=data) - def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dict[str, Any]]) -> None: + def _update_multiple(self, entity_set: str, table_schema_name: str, records: List[Dict[str, Any]]) -> None: """Bulk update existing records via the collection-bound ``UpdateMultiple`` action. :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param logical_name: Logical (singular) name (e.g. ``"account"``). - :type logical_name: ``str`` + :param table_schema_name: Schema name of the table, e.g. "new_MyTestTable". + :type table_schema_name: ``str`` :param records: List of patch dictionaries. Each must include the true primary key attribute (e.g. ``accountid``) and one or more fields to update. 
:type records: ``list[dict[str, Any]]`` :return: ``None`` @@ -425,9 +463,13 @@ def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dic # Determine whether we need logical name resolution (@odata.type missing in any payload) need_logical = any("@odata.type" not in r for r in records) + # @odata.type uses LogicalName (lowercase) + logical_name = table_schema_name.lower() enriched: List[Dict[str, Any]] = [] for r in records: - r = self._convert_labels_to_ints(logical_name, r) + # Lowercase all keys to match Dataverse LogicalName expectations + r = self._lowercase_keys(r) + r = self._convert_labels_to_ints(table_schema_name, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -441,27 +483,27 @@ def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dic # Intentionally ignore response content: no stable contract for IDs across environments. return None - def _delete(self, logical_name: str, key: str) -> None: - """Delete a record by GUID or alternate key. + def _delete(self, table_schema_name: str, key: str) -> None: + """Delete a record by GUID. - :param logical_name: Singular logical entity name. - :type logical_name: ``str`` - :param key: Record GUID (with or without parentheses) or alternate key syntax. + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param key: Record GUID (with or without parentheses) :type key: ``str`` :return: ``None`` :rtype: ``None`` """ - entity_set = self._entity_set_from_logical(logical_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" self._request("delete", url, headers={"If-Match": "*"}) - def _get(self, logical_name: str, key: str, select: Optional[List[str]] = None) -> Dict[str, Any]: + def _get(self, table_schema_name: str, key: str, select: Optional[List[str]] = None) -> Dict[str, Any]: """Retrieve a single record. 
- :param logical_name: Singular logical entity name. - :type logical_name: ``str`` - :param key: Record GUID (with or without parentheses) or alternate key syntax. + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param key: Record GUID (with or without parentheses). :type key: ``str`` :param select: Columns to select; joined with commas into $select. :type select: ``list[str]`` | ``None`` @@ -471,15 +513,16 @@ """ params = {} if select: + # NOTE(review): select is passed as-is here (not lowercased, unlike _get_multiple); provide lowercase logical names params["$select"] = ",".join(select) - entity_set = self._entity_set_from_logical(logical_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("get", url, params=params) return r.json() def _get_multiple( self, - logical_name: str, + table_schema_name: str, select: Optional[List[str]] = None, filter: Optional[str] = None, orderby: Optional[List[str]] = None, @@ -489,17 +532,17 @@ ) -> Iterable[List[Dict[str, Any]]]: """Iterate records from an entity set, yielding one page (list of dicts) at a time. - :param logical_name: Singular logical entity name. - :type logical_name: ``str`` - :param select: Columns to include (``$select``) or ``None``. + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param select: Columns to include (``$select``) or ``None``. Column names are automatically lowercased. :type select: ``list[str]`` | ``None`` - :param filter: OData ``$filter`` expression or ``None``. + :param filter: OData ``$filter`` expression or ``None``. This is passed as-is without transformation. Users must provide lowercase logical column names (e.g., "statecode eq 0"). :type filter: ``str`` | ``None`` - :param orderby: Order expressions (``$orderby``) or ``None``. 
+ :param orderby: Order expressions (``$orderby``) or ``None``. Column names are automatically lowercased. :type orderby: ``list[str]`` | ``None`` :param top: Max total records (applied on first request as ``$top``) or ``None``. :type top: ``int`` | ``None`` - :param expand: Navigation properties to expand (``$expand``) or ``None``. + :param expand: Navigation properties to expand (``$expand``) or ``None``. These are case-sensitive and passed as-is. Users must provide exact navigation property names from entity metadata. :type expand: ``list[str]`` | ``None`` :param page_size: Per-page size hint via ``Prefer: odata.maxpagesize``. :type page_size: ``int`` | ``None`` @@ -522,16 +565,20 @@ def _do_request(url: str, *, params: Optional[Dict[str, Any]] = None) -> Dict[st except ValueError: return {} - entity_set = self._entity_set_from_logical(logical_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) base_url = f"{self.api}/{entity_set}" params: Dict[str, Any] = {} if select: - params["$select"] = ",".join(select) + # Lowercase column names for case-insensitive matching + params["$select"] = ",".join(self._lowercase_list(select)) if filter: + # Filter is passed as-is; users must use lowercase column names in filter expressions params["$filter"] = filter if orderby: - params["$orderby"] = ",".join(orderby) + # Lowercase column names for case-insensitive matching + params["$orderby"] = ",".join(self._lowercase_list(orderby)) if expand: + # Navigation property names are case-sensitive; passed as-is (must match server names exactly) params["$expand"] = ",".join(expand) if top is not None: params["$top"] = int(top) @@ -577,7 +624,7 @@ def _query_sql(self, sql: str) -> list[dict[str, Any]]: # Extract logical table name via helper (robust to identifiers ending with 'from') logical = self._extract_logical_table(sql) - entity_set = self._entity_set_from_logical(logical) + entity_set = self._entity_set_from_schema_name(logical) # Issue GET /{entity_set}?sql= url = 
f"{self.api}/{entity_set}" params = {"sql": sql} @@ -616,18 +663,23 @@ def _extract_logical_table(sql: str) -> str: return m.group(1).lower() # ---------------------- Entity set resolution ----------------------- - def _entity_set_from_logical(self, logical: str) -> str: - """Resolve entity set name (plural) from a logical (singular) name using metadata. + def _entity_set_from_schema_name(self, table_schema_name: str) -> str: + """Resolve entity set name (plural) from a schema name (singular) name using metadata. - Caches results for subsequent SQL queries. + Caches results for subsequent queries. Case-insensitive. """ - if not logical: - raise ValueError("logical name required") - cached = self._logical_to_entityset_cache.get(logical) + if not table_schema_name: + raise ValueError("table schema name required") + + # Use normalized (lowercase) key for cache lookup + cache_key = self._normalize_cache_key(table_schema_name) + cached = self._logical_to_entityset_cache.get(cache_key) if cached: return cached url = f"{self.api}/EntityDefinitions" - logical_escaped = self._escape_odata_quotes(logical) + # LogicalName in Dataverse is stored in lowercase, so we need to lowercase for the filter + logical_lower = table_schema_name.lower() + logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "LogicalName,EntitySetName,PrimaryIdAttribute", "$filter": f"LogicalName eq '{logical_escaped}'", @@ -639,22 +691,22 @@ def _entity_set_from_logical(self, logical: str) -> str: except ValueError: items = [] if not items: - plural_hint = " (did you pass a plural entity set name instead of the singular logical name?)" if logical.endswith("s") and not logical.endswith("ss") else "" + plural_hint = " (did you pass a plural entity set name instead of the singular table schema name?)" if table_schema_name.endswith("s") and not table_schema_name.endswith("ss") else "" raise MetadataError( - f"Unable to resolve entity set for logical name '{logical}'. 
Provide the singular logical name.{plural_hint}", + f"Unable to resolve entity set for table schema name '{table_schema_name}'. Provide the singular table schema name.{plural_hint}", subcode=METADATA_ENTITYSET_NOT_FOUND, ) md = items[0] es = md.get("EntitySetName") if not es: raise MetadataError( - f"Metadata response missing EntitySetName for logical '{logical}'.", + f"Metadata response missing EntitySetName for table schema name '{table_schema_name}'.", subcode=METADATA_ENTITYSET_NAME_MISSING, ) - self._logical_to_entityset_cache[logical] = es + self._logical_to_entityset_cache[cache_key] = es primary_id_attr = md.get("PrimaryIdAttribute") if isinstance(primary_id_attr, str) and primary_id_attr: - self._logical_primaryid_cache[logical] = primary_id_attr + self._logical_primaryid_cache[cache_key] = primary_id_attr return es # ---------------------- Table metadata helpers ---------------------- @@ -675,22 +727,24 @@ def _to_pascal(self, name: str) -> str: parts = re.split(r"[^A-Za-z0-9]+", name) return "".join(p[:1].upper() + p[1:] for p in parts if p) - def _normalize_entity_schema(self, tablename: str) -> str: - if "_" in tablename: - return tablename - return f"new_{self._to_pascal(tablename)}" - - def _get_entity_by_schema( + def _get_entity_by_table_schema_name( self, - schema_name: str, + table_schema_name: str, headers: Optional[Dict[str, str]] = None, ) -> Optional[Dict[str, Any]]: + """Get entity metadata by table schema name. Case-insensitive. + + Note: LogicalName is stored lowercase in Dataverse, so we lowercase the input + for case-insensitive matching. The response includes SchemaName, LogicalName, + EntitySetName, and MetadataId. 
+ """ url = f"{self.api}/EntityDefinitions" - # Escape single quotes in schema name - schema_escaped = self._escape_odata_quotes(schema_name) + # LogicalName is stored lowercase, so we lowercase the input for lookup + logical_lower = table_schema_name.lower() + logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "MetadataId,LogicalName,SchemaName,EntitySetName", - "$filter": f"SchemaName eq '{schema_escaped}'", + "$filter": f"LogicalName eq '{logical_escaped}'", } r = self._request("get", url, params=params, headers=headers) items = r.json().get("value", []) @@ -698,7 +752,7 @@ def _get_entity_by_schema( def _create_entity( self, - schema_name: str, + table_schema_name: str, display_name: str, attributes: List[Dict[str, Any]], solution_unique_name: Optional[str] = None, @@ -706,7 +760,7 @@ def _create_entity( url = f"{self.api}/EntityDefinitions" payload = { "@odata.type": "Microsoft.Dynamics.CRM.EntityMetadata", - "SchemaName": schema_name, + "SchemaName": table_schema_name, "DisplayName": self._label(display_name), "DisplayCollectionName": self._label(display_name + "s"), "Description": self._label(f"Custom entity for {display_name}"), @@ -720,37 +774,27 @@ def _create_entity( if solution_unique_name: params = {"SolutionUniqueName": solution_unique_name} self._request("post", url, json=payload, params=params) - ent = self._get_entity_by_schema( - schema_name, + ent = self._get_entity_by_table_schema_name( + table_schema_name, headers={"Consistency": "Strong"}, ) if not ent or not ent.get("EntitySetName"): raise RuntimeError( - f"Failed to create or retrieve entity '{schema_name}' (EntitySetName not available)." + f"Failed to create or retrieve entity '{table_schema_name}' (EntitySetName not available)." ) if not ent.get("MetadataId"): raise RuntimeError( - f"MetadataId missing after creating entity '{schema_name}'." + f"MetadataId missing after creating entity '{table_schema_name}'." 
) return ent - def _normalize_attribute_schema(self, entity_schema: str, column_name: str) -> str: - # Use same publisher prefix segment as entity_schema if present; else default to 'new_'. - if not isinstance(column_name, str) or not column_name.strip(): - raise ValueError("column_name must be a non-empty string") - publisher = entity_schema.split("_", 1)[0] if "_" in entity_schema else "new" - expected_prefix = f"{publisher}_" - if column_name.lower().startswith(expected_prefix.lower()): - return column_name - return f"{publisher}_{self._to_pascal(column_name)}" - def _get_attribute_metadata( self, entity_metadata_id: str, - schema_name: str, + column_schema_name: str, extra_select: Optional[str] = None, ) -> Optional[Dict[str, Any]]: - attr_escaped = self._escape_odata_quotes(schema_name) + attr_escaped = self._escape_odata_quotes(column_schema_name) url = f"{self.api}/EntityDefinitions({entity_metadata_id})/Attributes" select_fields = ["MetadataId", "LogicalName", "SchemaName"] if extra_select: @@ -802,7 +846,7 @@ def _build_localizedlabels_payload(self, translations: Dict[int, str]) -> Dict[s "LocalizedLabels": locs, } - def _enum_optionset_payload(self, schema_name: str, enum_cls: type[Enum], is_primary_name: bool = False) -> Dict[str, Any]: + def _enum_optionset_payload(self, column_schema_name: str, enum_cls: type[Enum], is_primary_name: bool = False) -> Dict[str, Any]: """Create local (IsGlobal=False) PicklistAttributeMetadata from an Enum subclass. 
Supports translation mapping via optional class attribute `__labels__`: @@ -877,10 +921,10 @@ def _enum_optionset_payload(self, schema_name: str, enum_cls: type[Enum], is_pri "Label": self._build_localizedlabels_payload(per_lang), }) - attr_label = schema_name.split("_")[-1] + attr_label = column_schema_name.split("_")[-1] return { "@odata.type": "Microsoft.Dynamics.CRM.PicklistAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(attr_label), "RequiredLevel": {"Value": "None"}, "IsPrimaryName": bool(is_primary_name), @@ -902,7 +946,7 @@ def _normalize_picklist_label(self, label: str) -> str: norm = re.sub(r"\s+", " ", norm).strip().lower() return norm - def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[str, int]]: + def _optionset_map(self, table_schema_name: str, attr_logical: str) -> Optional[Dict[str, int]]: """Build or return cached mapping of normalized label -> value for a picklist attribute. Returns empty dict if attribute is not a picklist or has no options. Returns None only @@ -912,20 +956,22 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ ----- - This method calls the Web API twice per attribute so it could have perf impact when there are lots of columns on the entity. 
""" - if not logical_name or not attr_logical: + if not table_schema_name or not attr_logical: return None - cache_key = (logical_name, attr_logical.lower()) + # Normalize cache key for case-insensitive lookups + cache_key = (self._normalize_cache_key(table_schema_name), self._normalize_cache_key(attr_logical)) now = time.time() entry = self._picklist_label_cache.get(cache_key) if isinstance(entry, dict) and 'map' in entry and (now - entry.get('ts', 0)) < self._picklist_cache_ttl_seconds: return entry['map'] - attr_esc = self._escape_odata_quotes(attr_logical) - logical_esc = self._escape_odata_quotes(logical_name) + # LogicalNames in Dataverse are stored in lowercase, so we need to lowercase for filters + attr_esc = self._escape_odata_quotes(attr_logical.lower()) + table_schema_name_esc = self._escape_odata_quotes(table_schema_name.lower()) # Step 1: lightweight fetch (no expand) to determine attribute type url_type = ( - f"{self.api}/EntityDefinitions(LogicalName='{logical_esc}')/Attributes" + f"{self.api}/EntityDefinitions(LogicalName='{table_schema_name_esc}')/Attributes" f"?$filter=LogicalName eq '{attr_esc}'&$select=LogicalName,AttributeType" ) # Retry up to 3 times on 404 (new or not-yet-published attribute metadata). If still 404, raise. @@ -941,7 +987,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ time.sleep(0.4 * (2 ** attempt)) continue raise RuntimeError( - f"Picklist attribute metadata not found after retries: entity='{logical_name}' attribute='{attr_logical}' (404)" + f"Picklist attribute metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)" ) from err raise if r_type is None: @@ -959,7 +1005,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ # Step 2: fetch with expand only now that we know it's a picklist # Need to cast to the derived PicklistAttributeMetadata type; OptionSet is not a nav on base AttributeMetadata. 
cast_url = ( - f"{self.api}/EntityDefinitions(LogicalName='{logical_esc}')/Attributes(LogicalName='{attr_esc}')/" + f"{self.api}/EntityDefinitions(LogicalName='{table_schema_name_esc}')/Attributes(LogicalName='{attr_esc}')/" "Microsoft.Dynamics.CRM.PicklistAttributeMetadata?$select=LogicalName&$expand=OptionSet($select=Options)" ) # Step 2 fetch with retries: expanded OptionSet (cast form first) @@ -974,7 +1020,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ time.sleep(0.4 * (2 ** attempt)) # 0.4s, 0.8s continue raise RuntimeError( - f"Picklist OptionSet metadata not found after retries: entity='{logical_name}' attribute='{attr_logical}' (404)" + f"Picklist OptionSet metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)" ) from err raise if r_opts is None: @@ -1012,7 +1058,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ self._picklist_label_cache[cache_key] = {'map': {}, 'ts': now} return {} - def _convert_labels_to_ints(self, logical_name: str, record: Dict[str, Any]) -> Dict[str, Any]: + def _convert_labels_to_ints(self, table_schema_name: str, record: Dict[str, Any]) -> Dict[str, Any]: """Return a copy of record with any labels converted to option ints. Heuristic: For each string value, attempt to resolve against picklist metadata. 
@@ -1022,7 +1068,7 @@ def _convert_labels_to_ints(self, logical_name: str, record: Dict[str, Any]) -> for k, v in list(out.items()): if not isinstance(v, str) or not v.strip(): continue - mapping = self._optionset_map(logical_name, k) + mapping = self._optionset_map(table_schema_name, k) if not mapping: continue norm = self._normalize_picklist_label(v) @@ -1031,18 +1077,18 @@ def _convert_labels_to_ints(self, logical_name: str, record: Dict[str, Any]) -> out[k] = val return out - def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: bool = False) -> Optional[Dict[str, Any]]: + def _attribute_payload(self, column_schema_name: str, dtype: Any, *, is_primary_name: bool = False) -> Optional[Dict[str, Any]]: # Enum-based local option set support if isinstance(dtype, type) and issubclass(dtype, Enum): - return self._enum_optionset_payload(schema_name, dtype, is_primary_name=is_primary_name) + return self._enum_optionset_payload(column_schema_name, dtype, is_primary_name=is_primary_name) if not isinstance(dtype, str): - raise ValueError(f"Unsupported column spec type for '{schema_name}': {type(dtype)} (expected str or Enum subclass)") + raise ValueError(f"Unsupported column spec type for '{column_schema_name}': {type(dtype)} (expected str or Enum subclass)") dtype_l = dtype.lower().strip() - label = schema_name.split("_")[-1] + label = column_schema_name.split("_")[-1] if dtype_l in ("string", "text"): return { "@odata.type": "Microsoft.Dynamics.CRM.StringAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MaxLength": 200, @@ -1052,7 +1098,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("int", "integer"): return { "@odata.type": "Microsoft.Dynamics.CRM.IntegerAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), 
"RequiredLevel": {"Value": "None"}, "Format": "None", @@ -1062,7 +1108,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("decimal", "money"): return { "@odata.type": "Microsoft.Dynamics.CRM.DecimalAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MinValue": -100000000000.0, @@ -1072,7 +1118,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("float", "double"): return { "@odata.type": "Microsoft.Dynamics.CRM.DoubleAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MinValue": -100000000000.0, @@ -1082,7 +1128,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("datetime", "date"): return { "@odata.type": "Microsoft.Dynamics.CRM.DateTimeAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "Format": "DateOnly", @@ -1091,7 +1137,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("bool", "boolean"): return { "@odata.type": "Microsoft.Dynamics.CRM.BooleanAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "OptionSet": { @@ -1109,21 +1155,21 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b } return None - def _get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: + def _get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """Return basic metadata for a custom table if it exists. - :param tablename: Friendly name (without prefix) or full schema name including publisher prefix (e.g. ``new_Sample``). 
- :type tablename: ``str`` + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` :return: Metadata summary or ``None`` if not found. :rtype: ``dict[str, Any]`` | ``None`` """ - ent = self._get_entity_by_schema(tablename) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent: return None return { - "entity_schema": ent.get("SchemaName") or tablename, - "entity_logical_name": ent.get("LogicalName"), + "table_schema_name": ent.get("SchemaName") or table_schema_name, + "table_logical_name": ent.get("LogicalName"), "entity_set_name": ent.get("EntitySetName"), "metadata_id": ent.get("MetadataId"), "columns_created": [], @@ -1144,11 +1190,11 @@ def _list_tables(self) -> List[Dict[str, Any]]: r = self._request("get", url, params=params) return r.json().get("value", []) - def _delete_table(self, tablename: str) -> None: - """Delete a custom table by friendly or full schema name. + def _delete_table(self, table_schema_name: str) -> None: + """Delete a table by schema name. - :param tablename: Friendly name (without publisher prefix) or full schema name (e.g. ``new_Sample``). - :type tablename: ``str`` + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` :return: ``None`` :rtype: ``None`` @@ -1156,11 +1202,10 @@ def _delete_table(self, tablename: str) -> None: :raises MetadataError: If the table does not exist. :raises HttpError: If the delete request fails. 
""" - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{table_schema_name}' not found.", subcode=METADATA_TABLE_NOT_FOUND, ) metadata_id = ent["MetadataId"] @@ -1169,22 +1214,21 @@ def _delete_table(self, tablename: str) -> None: def _create_table( self, - tablename: str, + table_schema_name: str, schema: Dict[str, Any], solution_unique_name: Optional[str] = None, + primary_column_schema_name: Optional[str] = None, ) -> Dict[str, Any]: """Create a custom table with specified columns. - Accepts a friendly base name (prefix omitted) or a full schema name (with publisher prefix). If - the provided ``tablename`` lacks an underscore, a schema name of the form ``new_`` is - synthesized; otherwise the given name is treated as the full schema name. - - :param tablename: Friendly base name (e.g. ``"sample"``) or full schema name (e.g. ``"new_Sample"``). - :type tablename: ``str`` - :param schema: Mapping of column name (friendly or full) -> type spec (``str`` or ``Enum`` subclass). + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param schema: Mapping of column name -> type spec (``str`` or ``Enum`` subclass). :type schema: ``dict[str, Any]`` :param solution_unique_name: Optional solution container for the new table; if provided must be non-empty. :type solution_unique_name: ``str`` | ``None`` + :param primary_column_schema_name: Optional primary column schema name. + :type primary_column_schema_name: ``str`` | ``None`` :return: Metadata summary for the created table including created column schema names. :rtype: ``dict[str, Any]`` @@ -1194,28 +1238,31 @@ def _create_table( :raises TypeError: If ``solution_unique_name`` is not a ``str`` when provided. 
:raises HttpError: If underlying HTTP requests fail. """ - # Accept a friendly name and construct a default schema under 'new_'. - # If a full SchemaName is passed (contains '_'), use as-is. - entity_schema = self._normalize_entity_schema(tablename) - - ent = self._get_entity_by_schema(entity_schema) + # Check if table already exists (case-insensitive) + ent = self._get_entity_by_table_schema_name(table_schema_name) if ent: raise MetadataError( - f"Table '{entity_schema}' already exists.", + f"Table '{table_schema_name}' already exists.", subcode=METADATA_TABLE_ALREADY_EXISTS, ) created_cols: List[str] = [] - primary_attr_schema = "new_Name" if "_" not in entity_schema else f"{entity_schema.split('_',1)[0]}_Name" + + # Use provided primary column name, or derive from table_schema_name prefix (e.g., "new_Product" -> "new_Name"). + # If no prefix detected, default to "new_Name"; server will validate overall table schema. + if primary_column_schema_name: + primary_attr_schema = primary_column_schema_name + else: + primary_attr_schema = f"{table_schema_name.split('_',1)[0]}_Name" if "_" in table_schema_name else "new_Name" + attributes: List[Dict[str, Any]] = [] attributes.append(self._attribute_payload(primary_attr_schema, "string", is_primary_name=True)) for col_name, dtype in schema.items(): - attr_schema = self._normalize_attribute_schema(entity_schema, col_name) - payload = self._attribute_payload(attr_schema, dtype) + payload = self._attribute_payload(col_name, dtype) if not payload: raise ValueError(f"Unsupported column type '{dtype}' for '{col_name}'.") attributes.append(payload) - created_cols.append(attr_schema) + created_cols.append(col_name) if solution_unique_name is not None: if not isinstance(solution_unique_name, str): @@ -1224,15 +1271,15 @@ def _create_table( raise ValueError("solution_unique_name cannot be empty") metadata = self._create_entity( - entity_schema, - tablename, - attributes, - solution_unique_name, + table_schema_name=table_schema_name, 
+ display_name=table_schema_name, + attributes=attributes, + solution_unique_name=solution_unique_name, ) return { - "entity_schema": entity_schema, - "entity_logical_name": metadata.get("LogicalName"), + "table_schema_name": table_schema_name, + "table_logical_name": metadata.get("LogicalName"), "entity_set_name": metadata.get("EntitySetName"), "metadata_id": metadata.get("MetadataId"), "columns_created": created_cols, @@ -1240,14 +1287,14 @@ def _create_table( def _create_columns( self, - tablename: str, + table_schema_name: str, columns: Dict[str, Any], ) -> List[str]: """Create new columns on an existing table. - :param tablename: Friendly base name or full schema name of the table. - :type tablename: ``str`` - :param columns: Mapping of column name (friendly or full) -> type spec (``str`` or ``Enum`` subclass). + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param columns: Mapping of column schema name -> type spec (``str`` or ``Enum`` subclass). :type columns: ``dict[str, Any]`` :return: List of created column schema names. 
@@ -1260,11 +1307,11 @@ def _create_columns( """ if not isinstance(columns, dict) or not columns: raise TypeError("columns must be a non-empty dict[name -> type]") - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{table_schema_name}' not found.", subcode=METADATA_TABLE_NOT_FOUND, ) @@ -1273,15 +1320,14 @@ def _create_columns( needs_picklist_flush = False for column_name, column_type in columns.items(): - schema_name = self._normalize_attribute_schema(entity_schema, column_name) - payload = self._attribute_payload(schema_name, column_type) + payload = self._attribute_payload(column_name, column_type) if not payload: - raise ValueError(f"Unsupported column type '{column_type}' for '{schema_name}'.") + raise ValueError(f"Unsupported column type '{column_type}' for '{column_name}'.") url = f"{self.api}/EntityDefinitions({metadata_id})/Attributes" self._request("post", url, json=payload) - created.append(schema_name) + created.append(column_name) if "OptionSet" in payload: needs_picklist_flush = True @@ -1293,14 +1339,14 @@ def _create_columns( def _delete_columns( self, - tablename: str, + table_schema_name: str, columns: Union[str, List[str]], ) -> List[str]: """Delete one or more columns from a table. - :param tablename: Friendly base name or full schema name of the table. - :type tablename: ``str`` - :param columns: Single column name or list of column names (friendly or full schema forms). + :param table_schema_name: Schema name of the table. + :type table_schema_name: ``str`` + :param columns: Single column name or list of column names :type columns: ``str`` | ``list[str]`` :return: List of deleted column schema names (empty if none removed). 
@@ -1323,31 +1369,31 @@ def _delete_columns( if not isinstance(name, str) or not name.strip(): raise ValueError("column names must be non-empty strings") - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{table_schema_name}' not found.", subcode=METADATA_TABLE_NOT_FOUND, ) + # Use the actual SchemaName from the entity metadata + entity_schema = ent.get("SchemaName") or table_schema_name metadata_id = ent.get("MetadataId") deleted: List[str] = [] needs_picklist_flush = False for column_name in names: - schema_name = self._normalize_attribute_schema(entity_schema, column_name) - attr_meta = self._get_attribute_metadata(metadata_id, schema_name, extra_select="@odata.type,AttributeType") + attr_meta = self._get_attribute_metadata(metadata_id, column_name, extra_select="@odata.type,AttributeType") if not attr_meta: raise MetadataError( - f"Column '{schema_name}' not found on table '{entity_schema}'.", + f"Column '{column_name}' not found on table '{entity_schema}'.", subcode=METADATA_COLUMN_NOT_FOUND, ) attr_metadata_id = attr_meta.get("MetadataId") if not attr_metadata_id: raise RuntimeError( - f"Metadata incomplete for column '{schema_name}' (missing MetadataId)." + f"Metadata incomplete for column '{column_name}' (missing MetadataId)." 
) attr_url = f"{self.api}/EntityDefinitions({metadata_id})/Attributes({attr_metadata_id})" @@ -1359,7 +1405,7 @@ def _delete_columns( if "picklist" in attr_type_l or "optionset" in attr_type_l: needs_picklist_flush = True - deleted.append(schema_name) + deleted.append(column_name) if needs_picklist_flush: self._flush_cache("picklist") diff --git a/tests/unit/data/test_logical_crud.py b/tests/unit/data/test_logical_crud.py index c445679..1bc0b72 100644 --- a/tests/unit/data/test_logical_crud.py +++ b/tests/unit/data/test_logical_crud.py @@ -38,7 +38,7 @@ class MockableClient(ODataClient): def __init__(self, responses): super().__init__(DummyAuth(), "https://org.example", None) self._http = DummyHTTPClient(responses) - def _convert_labels_to_ints(self, logical_name, record): # pragma: no cover - test shim + def _convert_labels_to_ints(self, table_schema_name, record): # pragma: no cover - test shim return record # Helper metadata response for logical name resolution @@ -77,7 +77,7 @@ def test_single_create_update_delete_get(): (204, {}, {}), # delete ] c = MockableClient(responses) - entity_set = c._entity_set_from_logical("account") + entity_set = c._entity_set_from_schema_name("account") rid = c._create(entity_set, "account", {"name": "Acme"}) assert rid == guid rec = c._get("account", rid, select=["accountid", "name"]) @@ -97,7 +97,7 @@ def test_bulk_create_and_update(): (204, {}, {}), # UpdateMultiple 1:1 ] c = MockableClient(responses) - entity_set = c._entity_set_from_logical("account") + entity_set = c._entity_set_from_schema_name("account") ids = c._create_multiple(entity_set, "account", [{"name": "A"}, {"name": "B"}]) assert ids == [g1, g2] c._update_by_ids("account", ids, {"statecode": 1}) # broadcast @@ -116,10 +116,10 @@ def test_get_multiple_paging(): assert pages == [[{"accountid": "1"}], [{"accountid": "2"}]] -def test_unknown_logical_name_raises(): +def test_unknown_table_schema_name_raises(): responses = [ (200, {}, {"value": []}), # metadata lookup 
returns empty ] c = MockableClient(responses) with pytest.raises(MetadataError): - c._entity_set_from_logical("nonexistent") \ No newline at end of file + c._entity_set_from_schema_name("nonexistent") \ No newline at end of file