From 3d159a034d25b775a9d1766105675b2cdbfc592f Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 03:35:58 -0800 Subject: [PATCH 1/9] Use prefixed table name, add case sensitivity --- README.md | 20 +- examples/advanced/complete_walkthrough.py | 686 ---------------------- examples/advanced/file_upload.py | 8 +- examples/advanced/pandas_integration.py | 18 +- examples/advanced/walkthrough.py | 322 ++++++++++ examples/basic/functional_testing.py | 23 +- src/PowerPlatform/Dataverse/client.py | 138 +++-- src/PowerPlatform/Dataverse/data/odata.py | 305 ++++++---- 8 files changed, 619 insertions(+), 901 deletions(-) delete mode 100644 examples/advanced/complete_walkthrough.py create mode 100644 examples/advanced/walkthrough.py diff --git a/README.md b/README.md index 909f2cd..08d1458 100644 --- a/README.md +++ b/README.md @@ -104,6 +104,7 @@ The SDK provides a simple, pythonic interface for Dataverse operations: | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic information | +| **Publisher Prefixes** | Custom columns require publisher prefix (e.g., `"new_Title"` not `"Title"`) | ## Examples @@ -189,20 +190,23 @@ for page in pages: ### Table management ```python -# Create a custom table -table_info = client.create_table("Product", { - "code": "string", - "price": "decimal", - "active": "bool" +# Create a custom table with publisher-prefixed columns +table_info = client.create_table("new_Product", { + "new_Code": "string", + "new_Price": "decimal", + "new_Active": "bool" }) -# Add columns to existing table -client.create_columns("Product", {"category": "string"}) +# Add columns to existing table (columns must include publisher prefix) +client.create_columns("new_Product", {"new_Category": "string"}) # Clean up -client.delete_table("Product") 
+client.delete_table("new_Product") ``` +> **Important**: All custom column names must include the publisher prefix (e.g., `"new_"`). +> This ensures explicit, predictable naming and aligns with Dataverse metadata requirements. + ### File operations ```python diff --git a/examples/advanced/complete_walkthrough.py b/examples/advanced/complete_walkthrough.py deleted file mode 100644 index 948fced..0000000 --- a/examples/advanced/complete_walkthrough.py +++ /dev/null @@ -1,686 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. - -""" -PowerPlatform Dataverse Client - Complete SDK Walkthrough - -This comprehensive example demonstrates advanced usage of the PowerPlatform-Dataverse-Client SDK -including all major features and production-ready patterns. - -Features Demonstrated: -- Authentication setup and connection management -- Table creation with custom schemas and enums -- Single and bulk record operations (CRUD) -- Advanced querying with SQL and OData -- Paging and batch processing -- Column metadata management -- Multi-language label support -- Error handling and retry patterns -- Interactive cleanup options - -Prerequisites: - pip install PowerPlatform-Dataverse-Client - pip install azure-identity - -For local development, you can also run from source by uncommenting the sys.path line below. - -Note: This is a comprehensive demonstration. For basic installation validation, - use examples/basic/installation_example.py first. 
-""" - -import sys -from pathlib import Path -import os -from typing import Optional - -# Uncomment for local development from source -# sys.path.append(str(Path(__file__).resolve().parents[2] / "src")) - -from PowerPlatform.Dataverse import DataverseClient -from PowerPlatform.Dataverse.core.errors import MetadataError -from enum import IntEnum -from azure.identity import InteractiveBrowserCredential -import traceback -import requests -import time -from datetime import date, timedelta - - -entered = input("Enter Dataverse org URL (e.g. https://yourorg.crm.dynamics.com): ").strip() -if not entered: - print("No URL entered; exiting.") - sys.exit(1) - -base_url = entered.rstrip('/') -delete_choice = input("Delete the new_SampleItem table at end? (Y/n): ").strip() or "y" -delete_table_at_end = (str(delete_choice).lower() in ("y", "yes", "true", "1")) -# Ask once whether to pause between steps during this run -pause_choice = input("Pause between test steps? (y/N): ").strip() or "n" -pause_between_steps = (str(pause_choice).lower() in ("y", "yes", "true", "1")) -# Create a credential we can reuse (for DataverseClient) -credential = InteractiveBrowserCredential() -client = DataverseClient(base_url=base_url, credential=credential) - -# Small helpers: call logging and step pauses -def log_call(call: str) -> None: - print({"call": call}) - -def pause(next_step: str) -> None: - if pause_between_steps: - try: - input(f"\nNext: {next_step} — press Enter to continue...") - except EOFError: - # If stdin is not available, just proceed - pass - -# Small generic backoff helper used only in this quickstart -# Include common transient statuses like 429/5xx to improve resilience. 
-def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403, 404, 409, 412, 429, 500, 502, 503, 504), retry_if=None): - last_exc = None - for delay in delays: - if delay: - time.sleep(delay) - try: - return op() - except Exception as ex: - print(f'Request failed: {ex}') - last_exc = ex - if retry_if and retry_if(ex): - print("Retrying operation...") - continue - if isinstance(ex, requests.exceptions.HTTPError): - code = getattr(getattr(ex, 'response', None), 'status_code', None) - if code in retry_http_statuses: - print("Retrying operation...") - continue - break - if last_exc: - raise last_exc - -# Enum demonstrating local option set creation with multilingual labels (for French labels to work, enable French language in the environment first) -class Status(IntEnum): - Active = 1 - Inactive = 2 - Archived = 5 - __labels__ = { - 1033: { - "Active": "Active", - "Inactive": "Inactive", - "Archived": "Archived", - }, - 1036: { - "Active": "Actif", - "Inactive": "Inactif", - "Archived": "Archivé", - } - } - -print("Ensure custom table exists (Metadata):") -table_info = None -created_this_run = False - -# Check for existing table using list_tables -log_call("client.list_tables()") -tables = client.list_tables() -existing_table = next((t for t in tables if t.get("SchemaName") == "new_SampleItem"), None) -if existing_table: - table_info = client.get_table_info("new_SampleItem") - created_this_run = False - print({ - "table": table_info.get("entity_schema"), - "existed": True, - "entity_set": table_info.get("entity_set_name"), - "logical": table_info.get("entity_logical_name"), - "metadata_id": table_info.get("metadata_id"), - }) - -else: - # Create it since it doesn't exist - try: - log_call("client.create_table('new_SampleItem', schema={code,count,amount,when,active,status})") - table_info = client.create_table( - "new_SampleItem", - { - "code": "string", - "count": "int", - "amount": "decimal", - "when": "datetime", - "active": "bool", - "status": 
Status, - }, - ) - created_this_run = True if table_info and table_info.get("columns_created") else False - print({ - "table": table_info.get("entity_schema") if table_info else None, - "existed": False, - "entity_set": table_info.get("entity_set_name") if table_info else None, - "logical": table_info.get("entity_logical_name") if table_info else None, - "metadata_id": table_info.get("metadata_id") if table_info else None, - }) - except Exception as e: - # Print full stack trace and any HTTP response details if present - print("Create table failed:") - traceback.print_exc() - resp = getattr(e, 'response', None) - if resp is not None: - try: - print({ - "status": resp.status_code, - "url": getattr(resp, 'url', None), - "body": resp.text[:2000] if getattr(resp, 'text', None) else None, - }) - except Exception: - pass - # Fail fast: all operations must use the custom table - sys.exit(1) -entity_schema = table_info.get("entity_schema") or "new_SampleItem" -logical = table_info.get("entity_logical_name") -metadata_id = table_info.get("metadata_id") -if not metadata_id: - refreshed_info = client.get_table_info(entity_schema) or {} - metadata_id = refreshed_info.get("metadata_id") - if metadata_id: - table_info["metadata_id"] = metadata_id - -# Derive attribute logical name prefix from the entity logical name (segment before first underscore) -attr_prefix = logical.split("_", 1)[0] if "_" in logical else logical -code_key = f"{attr_prefix}_code" -count_key = f"{attr_prefix}_count" -amount_key = f"{attr_prefix}_amount" -when_key = f"{attr_prefix}_when" -status_key = f"{attr_prefix}_status" -id_key = f"{logical}id" - -def summary_from_record(rec: dict) -> dict: - return { - "code": rec.get(code_key), - "count": rec.get(count_key), - "amount": rec.get(amount_key), - "when": rec.get(when_key), - } - -def print_line_summaries(label: str, summaries: list[dict]) -> None: - print(label) - for s in summaries: - print( - f" - id={s.get('id')} code={s.get('code')} " - 
f"count={s.get('count')} amount={s.get('amount')} when={s.get('when')}" - ) - -def _has_installed_language(base_url: str, credential, lcid: int) -> bool: - try: - token = credential.get_token(f"{base_url}/.default").token - url = f"{base_url}/api/data/v9.2/RetrieveAvailableLanguages()" - headers = {"Authorization": f"Bearer {token}", "Accept": "application/json"} - resp = requests.get(url, headers=headers, timeout=15) - if not resp.ok: - return False - data = resp.json() if resp.content else {} - langs: list[int] = [] - for val in data.values(): - if isinstance(val, list) and val and all(isinstance(x, int) for x in val): - langs = val - break - print({"lang_check": {"endpoint": url, "status": resp.status_code, "found": langs, "using": lcid in langs}}) - return lcid in langs - except Exception: - return False - -# if French language (1036) is installed, we use labels in both English and French -use_french_labels = _has_installed_language(base_url, credential, 1036) -if use_french_labels: - print({"labels_language": "fr", "note": "French labels in use."}) -else: - print({"labels_language": "en", "note": "Using English (and numeric values)."}) - -# 2) Create a record in the new table -print("Create records (OData) demonstrating single create and bound CreateMultiple (multi):") - -# Define base payloads -single_payload = { - f"{attr_prefix}_name": "Sample A", - code_key: "X001", - count_key: 42, - amount_key: 123.45, - when_key: "2025-01-01", - f"{attr_prefix}_active": True, - status_key: ("Actif" if use_french_labels else Status.Active.value), -} -# Generate multiple payloads -# Distribution update: roughly one-third English labels, one-third French labels, one-third raw integer values. -# We cycle per record: index % 3 == 1 -> English label, == 2 -> French label (if available, else English), == 0 -> integer value. 
-multi_payloads: list[dict] = [] -base_date = date(2025, 1, 2) -# Fixed 6-step cycle pattern encapsulated in helper: Active, Inactive, Actif, Inactif, 1, 2 (repeat) -def _status_value_for_index(idx: int, use_french: bool): - pattern = [ - ("label", "Active"), - ("label", "Inactive"), - ("fr_label", "Actif"), - ("fr_label", "Inactif"), - ("int", Status.Active.value), - ("int", Status.Inactive.value), - ] - kind, raw = pattern[(idx - 1) % len(pattern)] - if kind == "label": - return raw - if kind == "fr_label": - if use_french: - return raw - return "Active" if raw == "Actif" else "Inactive" - return raw - -for i in range(1, 16): - multi_payloads.append({ - f"{attr_prefix}_name": f"Sample {i:02d}", - code_key: f"X{200 + i:03d}", - count_key: 5 * i, - amount_key: round(10.0 * i, 2), - when_key: (base_date + timedelta(days=i - 1)).isoformat(), - f"{attr_prefix}_active": True, - status_key: _status_value_for_index(i, use_french_labels), - }) - -record_ids: list[str] = [] - -try: - # Single create returns list[str] (length 1) - log_call(f"client.create('{logical}', single_payload)") - single_ids = backoff_retry(lambda: client.create(logical, single_payload)) - if not (isinstance(single_ids, list) and len(single_ids) == 1): - raise RuntimeError("Unexpected single create return shape (expected one-element list)") - record_ids.extend(single_ids) - - # Multi create returns list[str] - log_call(f"client.create('{logical}', multi_payloads)") - multi_ids = backoff_retry(lambda: client.create(logical, multi_payloads)) - if isinstance(multi_ids, list): - record_ids.extend([mid for mid in multi_ids if isinstance(mid, str)]) - else: - print({"multi_unexpected_type": type(multi_ids).__name__, "value_preview": str(multi_ids)[:300]}) - - print({"entity": logical, "created_ids": record_ids}) - print_line_summaries("Created record summaries (IDs only; representation not fetched):", [{"id": rid} for rid in record_ids[:1]]) -except Exception as e: - # Surface detailed info for debugging 
(especially multi-create failures) - print(f"Create failed: {e}") - resp = getattr(e, 'response', None) - if resp is not None: - try: - print({ - 'status': resp.status_code, - 'url': getattr(resp, 'url', None), - 'body': resp.text[:2000] if getattr(resp, 'text', None) else None, - 'headers': {k: v for k, v in getattr(resp, 'headers', {}).items() if k.lower() in ('request-id','activityid','dataverse-instanceversion','content-type')} - }) - except Exception: - pass - sys.exit(1) - -pause("Next: Read record") - -# 3) Read record via OData -print("Read (OData):") -try: - if record_ids: - # Read only the first record and move on - target = record_ids[0] - log_call(f"client.get('{logical}', '{target}')") - rec = backoff_retry(lambda: client.get(logical, target)) - print_line_summaries("Read record summary:", [{"id": target, **summary_from_record(rec)}]) - else: - raise RuntimeError("No record created; skipping read.") -except Exception as e: - print(f"Get failed: {e}") -# 3.5) Update record, then read again and verify -print("Update (OData) and verify:") -# Show what will be updated and planned update calls, then pause -try: - if not record_ids: - raise RuntimeError("No record created; skipping update.") - - update_data = { - f"{attr_prefix}_code": "X002", - f"{attr_prefix}_count": 99, - f"{attr_prefix}_amount": 543.21, - f"{attr_prefix}_when": "2025-02-02", - f"{attr_prefix}_active": False, - status_key: ("Inactif" if use_french_labels else Status.Inactive.value), - } - expected_checks = { - f"{attr_prefix}_code": "X002", - f"{attr_prefix}_count": 99, - f"{attr_prefix}_active": False, - status_key: Status.Inactive.value, - } - amount_key = f"{attr_prefix}_amount" - - # Describe what is changing - print( - { - "updating_to": { - code_key: update_data[code_key], - count_key: update_data[count_key], - amount_key: update_data[amount_key], - when_key: update_data[when_key], - } - } - ) - - # Choose a single target to update to keep other records different - target_id = 
record_ids[0] - pause("Execute Update") - - # Update only the chosen record and summarize - log_call(f"client.update('{logical}', '{target_id}', update_data)") - # Perform update (returns None); follow-up read to verify - backoff_retry(lambda: client.update(logical, target_id, update_data)) - verify_rec = backoff_retry(lambda: client.get(logical, target_id)) - for k, v in expected_checks.items(): - assert verify_rec.get(k) == v, f"Field {k} expected {v}, got {verify_rec.get(k)}" - got = verify_rec.get(amount_key) - got_f = float(got) if got is not None else None - assert got_f is not None and abs(got_f - 543.21) < 1e-6, f"Field {amount_key} expected 543.21, got {got}" - print({"entity": logical, "updated": True}) - print_line_summaries("Updated record summary:", [{"id": target_id, **summary_from_record(verify_rec)}]) -except Exception as e: - print(f"Update/verify failed: {e}") - sys.exit(1) - -# 3.6) Bulk update (UpdateMultiple) demo: update count field on up to first 5 remaining records -print("Bulk update (UpdateMultiple) demo:") -try: - if len(record_ids) > 1: - # Prepare a small subset to update (skip the first already updated one) - subset = record_ids[1:6] - bulk_updates = [] - for idx, rid in enumerate(subset, start=1): - # Simple deterministic changes so user can observe - bulk_updates.append({ - id_key: rid, - count_key: 100 + idx, # new count values - }) - log_call(f"client.update('{logical}', <{len(bulk_updates)} ids>, )") - # Unified update handles multiple via list of patches (returns None) - backoff_retry(lambda: client.update(logical, subset, bulk_updates)) - print({"bulk_update_requested": len(bulk_updates), "bulk_update_completed": True}) - # Verify the updated count values by refetching the subset - verification = [] - # Small delay to reduce risk of any brief replication delay - time.sleep(1) - for rid in subset: - rec = backoff_retry(lambda rid=rid: client.get(logical, rid)) - verification.append({ - "id": rid, - "count": rec.get(count_key), - 
}) - print({"bulk_update_verification": verification}) - else: - print({"bulk_update_skipped": True, "reason": "not enough records"}) -except Exception as e: - print(f"Bulk update failed: {e}") - -# 4) Query records via SQL (?sql parameter)) -print("Query (SQL via ?sql query parameter):") -try: - import time - pause("Execute SQL Query") - - def _run_query(): - cols = f"{id_key}, {code_key}, {amount_key}, {when_key}" - query = f"SELECT TOP 2 {cols} FROM {logical} ORDER BY {attr_prefix}_amount DESC" - log_call(f"client.query_sql(\"{query}\") (Web API ?sql=)") - return client.query_sql(query) - - def _retry_if(ex: Exception) -> bool: - msg = str(ex) if ex else "" - return ("Invalid table name" in msg) or ("Invalid object name" in msg) - - rows = backoff_retry(_run_query, delays=(0, 2, 5), retry_http_statuses=(), retry_if=_retry_if) - id_key = f"{logical}id" - ids = [r.get(id_key) for r in rows if isinstance(r, dict) and r.get(id_key)] - print({"entity": logical, "rows": len(rows) if isinstance(rows, list) else 0, "ids": ids}) - record_summaries = [] - for row in rows if isinstance(rows, list) else []: - record_summaries.append( - { - "id": row.get(id_key), - "code": row.get(code_key), - "count": row.get(count_key), - "amount": row.get(amount_key), - "when": row.get(when_key), - } - ) - print_line_summaries("SQL record summaries (top 2 by amount):", record_summaries) -except Exception as e: - print(f"SQL query failed: {e}") - -# Pause between SQL query and retrieve-multiple demos -pause("Retrieve multiple (OData paging demos)") - -# 4.5) Retrieve multiple via OData paging (scenarios) -def run_paging_demo(label: str, *, top: Optional[int], page_size: Optional[int]) -> None: - print("") - print({"paging_demo": label, "top": top, "page_size": page_size}) - total = 0 - page_index = 0 - _select = [id_key, code_key, amount_key, when_key, status_key] - _orderby = [f"{code_key} asc"] - for page in client.get( - logical, - select=_select, - filter=None, - orderby=_orderby, - 
top=top, - expand=None, - page_size=page_size, - ): - page_index += 1 - total += len(page) - print({ - "page": page_index, - "page_size": len(page), - "sample": [ - { - "id": r.get(id_key), - "code": r.get(code_key), - "amount": r.get(amount_key), - "when": r.get(when_key), - "status": r.get(status_key), - } - for r in page[:5] - ], - }) - print({"paging_demo_done": label, "pages": page_index, "total_rows": total}) - print("") - -print("") -print("==============================") -print("Retrieve multiple (OData paging demos)") -print("==============================") -try: - # 1) Tiny page size, no top: force multiple pages - run_paging_demo("page_size=2 (no top)", top=None, page_size=2) - pause("Next paging demo: top=3, page_size=2") - - # 2) Limit total results while keeping small pages - run_paging_demo("top=3, page_size=2", top=3, page_size=2) - pause("Next paging demo: top=2 (default page size)") - - # 3) Limit total results with default server page size (likely one page) - run_paging_demo("top=2 (default page size)", top=2, page_size=None) -except Exception as e: - print(f"Retrieve multiple demos failed: {e}") -# 5) Delete record -print("Delete (OData):") -# Show deletes to be executed (single + bulk) -if 'record_ids' in locals() and record_ids: - print({"delete_count": len(record_ids)}) -pause("Execute Delete (single then bulk)") -try: - if record_ids: - single_target = record_ids[0] - rest_targets = record_ids[1:] - single_error: Optional[str] = None - bulk_job_id: Optional[str] = None - bulk_error: Optional[str] = None - - try: - log_call(f"client.delete('{logical}', '{single_target}')") - backoff_retry(lambda: client.delete(logical, single_target)) - except Exception as ex: - single_error = str(ex) - - half = max(1, len(rest_targets) // 2) - bulk_targets = rest_targets[:half] - sequential_targets = rest_targets[half:] - bulk_error = None - sequential_error = None - - # Fire-and-forget bulk delete for the first portion - try: - 
log_call(f"client.delete('{logical}', <{len(bulk_targets)} ids>, use_bulk_delete=True)") - bulk_job_id = client.delete(logical, bulk_targets) - except Exception as ex: - bulk_error = str(ex) - - # Sequential deletes for the remainder - try: - log_call(f"client.delete('{logical}', <{len(sequential_targets)} ids>, use_bulk_delete=False)") - for rid in sequential_targets: - backoff_retry(lambda rid=rid: client.delete(logical, rid, use_bulk_delete=False)) - except Exception as ex: - sequential_error = str(ex) - - print({ - "entity": logical, - "delete_single": { - "id": single_target, - "error": single_error, - }, - "delete_bulk": { - "count": len(bulk_targets), - "job_id": bulk_job_id, - "error": bulk_error, - }, - "delete_sequential": { - "count": len(sequential_targets), - "error": sequential_error, - }, - }) - else: - raise RuntimeError("No record created; skipping delete.") -except Exception as e: - print(f"Delete failed: {e}") - -pause("Next: column metadata helpers") - -# 6) Column metadata helpers: column create/delete -print("Column metadata helpers (create/delete column):") -scratch_column = f"scratch_{int(time.time())}" -column_payload = {scratch_column: "string"} -try: - log_call(f"client.create_column('{entity_schema}', {repr(column_payload)})") - column_create = client.create_columns(entity_schema, column_payload) - if not isinstance(column_create, list) or not column_create: - raise RuntimeError("create_column did not return schema list") - created_details = column_create - if not all(isinstance(item, str) for item in created_details): - raise RuntimeError("create_column entries were not schema strings") - attribute_schema = created_details[0] - odata_client = client._get_odata() - exists_after_create = None - exists_after_delete = None - attr_type_before = None - if metadata_id and attribute_schema: - _ready_message = "Column metadata not yet available" - def _metadata_after_create(): - meta = odata_client._get_attribute_metadata( - metadata_id, - 
attribute_schema, - extra_select="@odata.type,AttributeType", - ) - if not meta or not meta.get("MetadataId"): - raise RuntimeError(_ready_message) - return meta - - ready_meta = backoff_retry( - _metadata_after_create, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: isinstance(exc, RuntimeError) and str(exc) == _ready_message, - ) - exists_after_create = bool(ready_meta) - raw_type = ready_meta.get("@odata.type") or ready_meta.get("AttributeType") - if isinstance(raw_type, str): - attr_type_before = raw_type - lowered = raw_type.lower() - delete_target = attribute_schema or scratch_column - log_call(f"client.delete_column('{entity_schema}', '{delete_target}')") - - def _delete_column(): - return client.delete_columns(entity_schema, delete_target) - - column_delete = backoff_retry( - _delete_column, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: ( - isinstance(exc, MetadataError) - or "not found" in str(exc).lower() - or "not yet available" in str(exc).lower() - ), - ) - if not isinstance(column_delete, list) or not column_delete: - raise RuntimeError("delete_column did not return schema list") - deleted_details = column_delete - if not all(isinstance(item, str) for item in deleted_details): - raise RuntimeError("delete_column entries were not schema strings") - if attribute_schema not in deleted_details: - raise RuntimeError("delete_column response missing expected schema name") - if metadata_id and attribute_schema: - _delete_message = "Column metadata still present after delete" - def _ensure_removed(): - meta = odata_client._get_attribute_metadata(metadata_id, attribute_schema) - if meta: - raise RuntimeError(_delete_message) - return True - - removed = backoff_retry( - _ensure_removed, - delays=(0, 1, 2, 4, 8), - retry_http_statuses=(), - retry_if=lambda exc: isinstance(exc, RuntimeError) and str(exc) == _delete_message, - ) - exists_after_delete = not removed - print({ - "created_column": scratch_column, 
- "create_summary": created_details, - "delete_summary": deleted_details, - "attribute_type_before_delete": attr_type_before, - "exists_after_create": exists_after_create, - "exists_after_delete": exists_after_delete, - }) -except MetadataError as meta_err: - print({"column_metadata_error": str(meta_err)}) -except Exception as exc: - print({"column_metadata_unexpected": str(exc)}) - -pause("Next: Cleanup table") - -# 7) Cleanup: delete the custom table if it exists -print("Cleanup (Metadata):") -if delete_table_at_end: - try: - log_call("client.get_table_info('new_SampleItem')") - info = client.get_table_info("new_SampleItem") - if info: - log_call("client.delete_table('new_SampleItem')") - client.delete_table("new_SampleItem") - print({"table_deleted": True}) - else: - print({"table_deleted": False, "reason": "not found"}) - except Exception as e: - print(f"Delete table failed: {e}") -else: - print({"table_deleted": False, "reason": "user opted to keep table"}) diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 63e9f06..5864de1 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -174,8 +174,8 @@ def ensure_table(): if existing: print({"table": TABLE_SCHEMA_NAME, "existed": True}) return existing - log("client.create_table('new_FileSample', schema={title})") - info = client.create_table(TABLE_SCHEMA_NAME, {"title": "string"}) + log("client.create_table('new_FileSample', schema={'new_Title': 'string'})") + info = client.create_table(TABLE_SCHEMA_NAME, {"new_Title": "string"}) print({"table": TABLE_SCHEMA_NAME, "existed": False, "metadata_id": info.get('metadata_id')}) return info @@ -401,8 +401,8 @@ def get_dataset_info(file_path: Path): # --------------------------- Cleanup --------------------------- if cleanup_record and record_id: try: - log(f"client.delete('{entity_set}', '{record_id}')") - backoff(lambda: client.delete(entity_set, record_id)) + log(f"client.delete('{logical}', 
'{record_id}')") + backoff(lambda: client.delete(logical, record_id)) print({"record_deleted": True}) except Exception as e: # noqa: BLE001 print({"record_deleted": False, "error": str(e)}) diff --git a/examples/advanced/pandas_integration.py b/examples/advanced/pandas_integration.py index fdd3a86..8bcb004 100644 --- a/examples/advanced/pandas_integration.py +++ b/examples/advanced/pandas_integration.py @@ -69,7 +69,7 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 created_this_run = False # First check for existing table -existing = client.get_table_info("SampleItem") +existing = client.get_table_info("new_SampleItem") if existing: table_info = existing created_this_run = False @@ -85,13 +85,13 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 # Create it since it doesn't exist try: table_info = client.create_table( - "SampleItem", + "new_SampleItem", { - "code": "string", - "count": "int", - "amount": "decimal", - "when": "datetime", - "active": "bool", + "new_Code": "string", + "new_Count": "int", + "new_Amount": "decimal", + "new_When": "datetime", + "new_Active": "bool", }, ) created_this_run = True if table_info and table_info.get("columns_created") else False @@ -232,9 +232,9 @@ def _retry_if(ex: Exception) -> bool: print("Cleanup (Metadata):") try: # Delete if present, regardless of whether it was created in this run - info = client.get_table_info("SampleItem") + info = client.get_table_info("new_SampleItem") if info: - client.delete_table("SampleItem") + client.delete_table("new_SampleItem") print({"table_deleted": True}) else: print({"table_deleted": False, "reason": "not found"}) diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py new file mode 100644 index 0000000..25b7433 --- /dev/null +++ b/examples/advanced/walkthrough.py @@ -0,0 +1,322 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. 
+ +""" +Walkthrough demonstrating core Dataverse SDK operations. + +This example shows: +- Table creation with various column types including enums +- Single and multiple record CRUD operations +- Querying with filtering, paging, and SQL +- Picklist label-to-value conversion +- Column management +- Cleanup + +Prerequisites: +- pip install PowerPlatform-Dataverse-Client +- pip install azure-identity +""" + +import sys +import json +from enum import IntEnum +from azure.identity import InteractiveBrowserCredential +from PowerPlatform.Dataverse.client import DataverseClient + + +# Simple logging helper +def log_call(description): + print(f"\n→ {description}") + + +# Define enum for priority picklist +class Priority(IntEnum): + LOW = 1 + MEDIUM = 2 + HIGH = 3 + + +def main(): + print("=" * 80) + print("Dataverse SDK Walkthrough") + print("=" * 80) + + # ============================================================================ + # 1. SETUP & AUTHENTICATION + # ============================================================================ + print("\n" + "=" * 80) + print("1. Setup & Authentication") + print("=" * 80) + + base_url = input("Enter Dataverse org URL (e.g. https://yourorg.crm.dynamics.com): ").strip() + if not base_url: + print("No URL entered; exiting.") + sys.exit(1) + + base_url = base_url.rstrip('/') + + log_call("InteractiveBrowserCredential()") + credential = InteractiveBrowserCredential() + + log_call(f"DataverseClient(base_url='{base_url}', credential=...)") + client = DataverseClient(base_url=base_url, credential=credential) + print(f"✓ Connected to: {base_url}") + + # ============================================================================ + # 2. TABLE CREATION (METADATA) + # ============================================================================ + print("\n" + "=" * 80) + print("2. 
Table Creation (Metadata)") + print("=" * 80) + + table_name = "new_WalkthroughDemo" + + log_call(f"client.get_table_info('{table_name}')") + table_info = client.get_table_info(table_name) + + if table_info: + print(f"✓ Table already exists: {table_info.get('entity_schema')}") + print(f" Logical Name: {table_info.get('entity_logical_name')}") + print(f" Entity Set: {table_info.get('entity_set_name')}") + else: + log_call(f"client.create_table('{table_name}', schema={{...}})") + schema = { + "new_Title": "string", + "new_Quantity": "int", + "new_Amount": "decimal", + "new_Completed": "bool", + "new_Priority": Priority + } + table_info = client.create_table(table_name, schema) + print(f"✓ Created table: {table_info.get('entity_schema')}") + print(f" Columns created: {', '.join(table_info.get('columns_created', []))}") + + # ============================================================================ + # 3. CREATE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("3. 
Create Operations") + print("=" * 80) + + # Single create + log_call(f"client.create('{table_name}', {{...}})") + single_record = { + "new_Title": "Complete project documentation", + "new_Quantity": 5, + "new_Amount": 1250.50, + "new_Completed": False, + "new_Priority": Priority.MEDIUM + } + id1 = client.create(table_name, single_record)[0] + print(f"✓ Created single record: {id1}") + + # Multiple create + log_call(f"client.create('{table_name}', [{{...}}, {{...}}, {{...}}])") + multiple_records = [ + { + "new_Title": "Review code changes", + "new_Quantity": 10, + "new_Amount": 500.00, + "new_Completed": True, + "new_Priority": Priority.HIGH + }, + { + "new_Title": "Update test cases", + "new_Quantity": 8, + "new_Amount": 750.25, + "new_Completed": False, + "new_Priority": Priority.LOW + }, + { + "new_Title": "Deploy to staging", + "new_Quantity": 3, + "new_Amount": 2000.00, + "new_Completed": False, + "new_Priority": Priority.HIGH + } + ] + ids = client.create(table_name, multiple_records) + print(f"✓ Created {len(ids)} records: {ids}") + + # ============================================================================ + # 4. READ OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("4. 
Read Operations") + print("=" * 80) + + # Single read by ID + log_call(f"client.get('{table_name}', '{id1}')") + record = client.get(table_name, id1) + print("✓ Retrieved single record:") + print(json.dumps({ + "new_walkthroughdemoid": record.get("new_walkthroughdemoid"), + "new_title": record.get("new_title"), + "new_quantity": record.get("new_quantity"), + "new_amount": record.get("new_amount"), + "new_completed": record.get("new_completed"), + "new_priority": record.get("new_priority"), + "new_priority@FormattedValue": record.get("new_priority@OData.Community.Display.V1.FormattedValue") + }, indent=2)) + + # Multiple read with filter + log_call(f"client.get('{table_name}', filter='new_quantity gt 5')") + all_records = [] + for page in client.get(table_name, filter="new_quantity gt 5"): + all_records.extend(page) + print(f"✓ Found {len(all_records)} records with new_quantity > 5") + for rec in all_records: + print(f" - new_Title='{rec.get('new_title')}', new_Quantity={rec.get('new_quantity')}") + + # ============================================================================ + # 5. UPDATE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("5. Update Operations") + print("=" * 80) + + # Single update + log_call(f"client.update('{table_name}', '{id1}', {{...}})") + client.update(table_name, id1, {"new_Quantity": 100}) + updated = client.get(table_name, id1) + print(f"✓ Updated single record new_Quantity: {updated.get('new_quantity')}") + + # Multiple update (broadcast same change) + log_call(f"client.update('{table_name}', [{len(ids)} IDs], {{...}})") + client.update(table_name, ids, {"new_Completed": True}) + print(f"✓ Updated {len(ids)} records to new_Completed=True") + + # ============================================================================ + # 6. PAGING DEMO + # ============================================================================ + print("\n" + "=" * 80) + print("6. 
Paging Demo") + print("=" * 80) + + # Create 20 records for paging + log_call(f"client.create('{table_name}', [20 records])") + paging_records = [ + { + "new_Title": f"Paging test item {i}", + "new_Quantity": i, + "new_Amount": i * 10.0, + "new_Completed": False, + "new_Priority": Priority.LOW + } + for i in range(1, 21) + ] + paging_ids = client.create(table_name, paging_records) + print(f"✓ Created {len(paging_ids)} records for paging demo") + + # Query with paging + log_call(f"client.get('{table_name}', page_size=5)") + print("Fetching records with page_size=5...") + for page_num, page in enumerate(client.get(table_name, orderby=["new_Quantity"], page_size=5), start=1): + record_ids = [r.get('new_walkthroughdemoid')[:8] + "..." for r in page] + print(f" Page {page_num}: {len(page)} records - IDs: {record_ids}") + + # ============================================================================ + # 7. SQL QUERY + # ============================================================================ + print("\n" + "=" * 80) + print("7. SQL Query") + print("=" * 80) + + log_call(f"client.query_sql('SELECT new_title, new_quantity FROM {table_name} WHERE new_completed = 1')") + sql = f"SELECT new_title, new_quantity FROM new_walkthroughdemo WHERE new_completed = 1" + try: + results = client.query_sql(sql) + print(f"✓ SQL query returned {len(results)} completed records:") + for result in results[:5]: # Show first 5 + print(f" - new_Title='{result.get('new_title')}', new_Quantity={result.get('new_quantity')}") + except Exception as e: + print(f"⚠ SQL query failed (known server-side bug): {str(e)}") + + # ============================================================================ + # 8. PICKLIST LABEL CONVERSION + # ============================================================================ + print("\n" + "=" * 80) + print("8. 
Picklist Label Conversion") + print("=" * 80) + + log_call(f"client.create('{table_name}', {{'new_Priority': 'High'}})") + label_record = { + "new_Title": "Test label conversion", + "new_Quantity": 1, + "new_Amount": 99.99, + "new_Completed": False, + "new_Priority": "High" # String label instead of int + } + label_id = client.create(table_name, label_record)[0] + retrieved = client.get(table_name, label_id) + print(f"✓ Created record with string label 'High' for new_Priority") + print(f" new_Priority stored as integer: {retrieved.get('new_priority')}") + print(f" new_Priority@FormattedValue: {retrieved.get('new_priority@OData.Community.Display.V1.FormattedValue')}") + + # ============================================================================ + # 9. COLUMN MANAGEMENT + # ============================================================================ + print("\n" + "=" * 80) + print("9. Column Management") + print("=" * 80) + + log_call(f"client.create_columns('{table_name}', {{'new_Notes': 'string'}})") + created_cols = client.create_columns(table_name, {"new_Notes": "string"}) + print(f"✓ Added column: {created_cols[0]}") + + # Delete the column we just added + log_call(f"client.delete_columns('{table_name}', ['new_Notes'])") + client.delete_columns(table_name, ["new_Notes"]) + print(f"✓ Deleted column: new_Notes") + + # ============================================================================ + # 10. DELETE OPERATIONS + # ============================================================================ + print("\n" + "=" * 80) + print("10. 
Delete Operations") + print("=" * 80) + + # Single delete + log_call(f"client.delete('{table_name}', '{id1}')") + client.delete(table_name, id1) + print(f"✓ Deleted single record: {id1}") + + # Multiple delete (delete the paging demo records) + log_call(f"client.delete('{table_name}', [{len(paging_ids)} IDs])") + job_id = client.delete(table_name, paging_ids) + print(f"✓ Bulk delete job started: {job_id}") + print(f" (Deleting {len(paging_ids)} paging demo records)") + + # ============================================================================ + # 11. CLEANUP + # ============================================================================ + print("\n" + "=" * 80) + print("11. Cleanup") + print("=" * 80) + + log_call(f"client.delete_table('{table_name}')") + client.delete_table(table_name) + print(f"✓ Deleted table: {table_name}") + + # ============================================================================ + # SUMMARY + # ============================================================================ + print("\n" + "=" * 80) + print("Walkthrough Complete!") + print("=" * 80) + print("\nDemonstrated operations:") + print(" ✓ Table creation with multiple column types") + print(" ✓ Single and multiple record creation") + print(" ✓ Reading records by ID and with filters") + print(" ✓ Single and multiple record updates") + print(" ✓ Paging through large result sets") + print(" ✓ SQL queries") + print(" ✓ Picklist label-to-value conversion") + print(" ✓ Column management") + print(" ✓ Single and bulk delete operations") + print(" ✓ Table cleanup") + print("=" * 80) + + +if __name__ == "__main__": + main() diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index dd5fdf5..07512ce 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -78,30 +78,31 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]: print("\n📋 Test Table Setup") print("=" * 50) - table_schema = 
"TestSDKFunctionality" + tablename = "test_TestSDKFunctionality" try: # Check if table already exists - existing_table = client.get_table_info(table_schema) + existing_table = client.get_table_info(tablename) if existing_table: - print(f"✅ Test table '{table_schema}' already exists") + print(f"✅ Test table '{tablename}' already exists") return existing_table except Exception: - print(f"📝 Table '{table_schema}' not found, creating...") + print(f"📝 Table '{tablename}' not found, creating...") try: print("🔨 Creating new test table...") # Create the test table with various field types table_info = client.create_table( - table_schema, + tablename, + primary_column_name="test_name", + schema= { - "name": "string", # Primary name field - "description": "string", # Description field - "count": "int", # Integer field - "amount": "decimal", # Decimal field - "is_active": "bool", # Boolean field - "created_date": "datetime" # DateTime field + "test_description": "string", # Description field + "test_count": "int", # Integer field + "test_amount": "decimal", # Decimal field + "test_is_active": "bool", # Boolean field + "test_created_date": "datetime" # DateTime field } ) diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index 9a18fc8..ceead14 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -101,12 +101,12 @@ def _get_odata(self) -> ODataClient: return self._odata # ---------------- Unified CRUD: create/update/delete ---------------- - def create(self, logical_name: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: + def create(self, tablename: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: """ - Create one or more records by logical (singular) entity name. + Create one or more records by table name. - :param logical_name: Logical (singular) entity name, e.g. ``"account"`` or ``"contact"``. 
- :type logical_name: str + :param tablename: Table name (e.g. ``"account"``, ``"contact"``, or ``"new_customtable"``). + :type tablename: str :param records: A single record dictionary or a list of record dictionaries. Each dictionary should contain attribute logical names as keys. :type records: dict or list[dict] @@ -134,21 +134,21 @@ def create(self, logical_name: str, records: Union[Dict[str, Any], List[Dict[str print(f"Created {len(ids)} accounts") """ od = self._get_odata() - entity_set = od._entity_set_from_logical(logical_name) + entity_set = od._entity_set_from_logical(tablename) if isinstance(records, dict): - rid = od._create(entity_set, logical_name, records) + rid = od._create(entity_set, tablename, records) # _create returns str on single input if not isinstance(rid, str): raise TypeError("_create (single) did not return GUID string") return [rid] if isinstance(records, list): - ids = od._create_multiple(entity_set, logical_name, records) + ids = od._create_multiple(entity_set, tablename, records) if not isinstance(ids, list) or not all(isinstance(x, str) for x in ids): raise TypeError("_create (multi) did not return list[str]") return ids raise TypeError("records must be dict or list[dict]") - def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def update(self, tablename: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """ Update one or more records. @@ -158,8 +158,8 @@ def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[D 2. Broadcast update: ``update("account", [id1, id2], {"status": 1})`` - applies same changes to all IDs 3. Paired updates: ``update("account", [id1, id2], [changes1, changes2])`` - one-to-one mapping - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: str + :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). 
+ :type tablename: str :param ids: Single GUID string or list of GUID strings to update. :type ids: str or list[str] :param changes: Dictionary of changes for single/broadcast mode, or list of dictionaries @@ -197,24 +197,24 @@ def update(self, logical_name: str, ids: Union[str, List[str]], changes: Union[D if isinstance(ids, str): if not isinstance(changes, dict): raise TypeError("For single id, changes must be a dict") - od._update(logical_name, ids, changes) # discard representation + od._update(tablename, ids, changes) # discard representation return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") - od._update_by_ids(logical_name, ids, changes) + od._update_by_ids(tablename, ids, changes) return None def delete( self, - logical_name: str, + tablename: str, ids: Union[str, List[str]], use_bulk_delete: bool = True, ) -> Optional[str]: """ Delete one or more records by GUID. - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: str + :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). + :type tablename: str :param ids: Single GUID string or list of GUID strings to delete. 
:type ids: str or list[str] :param use_bulk_delete: When ``True`` (default) and ``ids`` is a list, execute the BulkDelete action and @@ -238,7 +238,7 @@ def delete( """ od = self._get_odata() if isinstance(ids, str): - od._delete(logical_name, ids) + od._delete(tablename, ids) return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") @@ -247,14 +247,14 @@ def delete( if not all(isinstance(rid, str) for rid in ids): raise TypeError("ids must contain string GUIDs") if use_bulk_delete: - return od._delete_multiple(logical_name, ids) + return od._delete_multiple(tablename, ids) for rid in ids: - od._delete(logical_name, rid) + od._delete(tablename, rid) return None def get( self, - logical_name: str, + tablename: str, record_id: Optional[str] = None, select: Optional[List[str]] = None, filter: Optional[str] = None, @@ -269,15 +269,20 @@ def get( When ``record_id`` is provided, returns a single record dictionary. When ``record_id`` is None, returns a generator yielding batches of records. - :param logical_name: Logical (singular) entity name, e.g. ``"account"``. - :type logical_name: str + :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). + :type tablename: str :param record_id: Optional GUID to fetch a specific record. If None, queries multiple records. :type record_id: str or None - :param select: Optional list of attribute logical names to retrieve. + :param select: Optional list of attribute logical names to retrieve. Column names are + case-insensitive and automatically lowercased (e.g. ``["new_Title", "new_Amount"]`` + becomes ``"new_title,new_amount"``). :type select: list[str] or None - :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"``. + :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"`` or + ``"new_quantity gt 5"``. **Column names in filter expressions must use lowercase + logical names** (e.g. ``"new_quantity"`` not ``"new_Quantity"``). 
:type filter: str or None :param orderby: Optional list of attributes to sort by, e.g. ``["name asc", "createdon desc"]``. + Column names are automatically lowercased. :type orderby: list[str] or None :param top: Optional maximum number of records to return. :type top: int or None @@ -319,12 +324,12 @@ def get( if not isinstance(record_id, str): raise TypeError("record_id must be str") return od._get( - logical_name, + tablename, record_id, select=select, ) return od._get_multiple( - logical_name, + tablename, select=select, filter=filter, orderby=orderby, @@ -374,10 +379,9 @@ def query_sql(self, sql: str): # Table metadata helpers def get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: """ - Get basic metadata for a custom table if it exists. + Get basic metadata for a table if it exists. - :param tablename: Table friendly name (e.g. ``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). + :param tablename: Table name (e.g. ``"new_SampleItem"`` or ``"account"``). :type tablename: str :return: Dictionary containing table metadata with keys ``entity_schema``, @@ -388,7 +392,7 @@ def get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: Example: Retrieve table metadata:: - info = client.get_table_info("SampleItem") + info = client.get_table_info("new_SampleItem") if info: print(f"Logical name: {info['entity_logical_name']}") print(f"Entity set: {info['entity_set_name']}") @@ -400,15 +404,15 @@ def create_table( tablename: str, schema: Dict[str, Any], solution_unique_name: Optional[str] = None, + primary_column_name: Optional[str] = None, ) -> Dict[str, Any]: """ Create a simple custom table with specified columns. - :param tablename: Table friendly name (e.g. ``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). If a publisher prefix is not included, the default - publisher prefix will be applied. + :param tablename: Table name with publisher prefix (e.g. ``"new_SampleItem"``). 
:type tablename: str - :param schema: Dictionary mapping column logical names (without prefix) to their types. + :param schema: Dictionary mapping column names (with publisher prefix) to their types. + **All custom column names must include the publisher prefix** (e.g. ``"new_Title"``). Supported types: - Primitive types: ``"string"``, ``"int"``, ``"decimal"``, ``"float"``, ``"datetime"``, ``"bool"`` @@ -427,6 +431,10 @@ class ItemStatus(IntEnum): :param solution_unique_name: Optional solution unique name that should own the new table. When omitted the table is created in the default solution. :type solution_unique_name: str or None + :param primary_column_name: Optional primary name column schema name with publisher prefix + (e.g. ``"new_ProductName"``). If not provided, defaults to ``"{prefix}_Name"`` derived + from the table's publisher prefix. + :type primary_column_name: str or None :return: Dictionary containing table metadata including ``entity_schema``, ``entity_set_name``, ``entity_logical_name``, ``metadata_id``, and ``columns_created``. @@ -444,29 +452,37 @@ class ItemStatus(IntEnum): INACTIVE = 2 schema = { - "title": "string", - "quantity": "int", - "price": "decimal", - "available": "bool", - "status": ItemStatus + "new_Title": "string", # Note: includes 'new_' prefix + "new_Quantity": "int", + "new_Price": "decimal", + "new_Available": "bool", + "new_Status": ItemStatus } - result = client.create_table("SampleItem", schema) + result = client.create_table("new_SampleItem", schema) print(f"Created table: {result['entity_logical_name']}") print(f"Columns: {result['columns_created']}") + + Create a table with a custom primary column name:: + + result = client.create_table( + "new_Product", + {"new_Price": "decimal"}, + primary_column_name="new_ProductName" + ) """ return self._get_odata()._create_table( tablename, schema, solution_unique_name, + primary_column_name, ) def delete_table(self, tablename: str) -> None: """ Delete a custom table by name. 
- :param tablename: Table friendly name (e.g. ``"SampleItem"``) or full schema name - (e.g. ``"new_SampleItem"``). + :param tablename: Table name (e.g. ``"new_SampleItem"`` or ``"account"``). :type tablename: str :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If the table does not exist or deletion fails. @@ -478,7 +494,7 @@ def delete_table(self, tablename: str) -> None: Example: Delete a custom table:: - client.delete_table("SampleItem") + client.delete_table("new_SampleItem") """ self._get_odata()._delete_table(tablename) @@ -506,25 +522,28 @@ def create_columns( """ Create one or more columns on an existing table using a schema-style mapping. - :param tablename: Friendly name ("SampleItem") or full schema name ("new_SampleItem"). + :param tablename: Table name (e.g. ``"new_SampleItem"``). :type tablename: str - :param columns: Mapping of logical names (without prefix) to supported types. Primitive types include - ``string``, ``int``, ``decimal``, ``float``, ``datetime``, and ``bool``. Enum subclasses (IntEnum preferred) - generate a local option set and can specify localized labels via ``__labels__``. + :param columns: Mapping of column schema names (with publisher prefix) to supported types. + **All custom column names must include the publisher prefix** (e.g. ``"new_Notes"``). + Primitive types include ``string``, ``int``, ``decimal``, ``float``, ``datetime``, and ``bool``. + Enum subclasses (IntEnum preferred) generate a local option set and can specify localized + labels via ``__labels__``. :type columns: Dict[str, Any] :returns: Schema names for the columns that were created. 
:rtype: list[str] + Example: Create two columns on the custom table:: created = client.create_columns( "new_SampleItem", { - "scratch": "string", - "flags": "bool", + "new_Scratch": "string", + "new_Flags": "bool", }, ) - print(created) + print(created) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._create_columns( tablename, @@ -539,13 +558,14 @@ def delete_columns( """ Delete one or more columns from a table. - :param tablename: Friendly or schema name of the table. + :param tablename: Table name (e.g. ``"new_SampleItem"``). :type tablename: str - :param columns: Column name or list of column names to remove. Friendly names are normalized to schema - names using the same prefix logic as ``create_columns``. + :param columns: Column name or list of column schema names to remove. + **Must include publisher prefix** (e.g. ``"new_Scratch"``). Case-insensitive. :type columns: str | list[str] :returns: Schema names for the columns that were removed. :rtype: list[str] + Example: Remove two custom columns by schema name: @@ -553,7 +573,7 @@ def delete_columns( "new_SampleItem", ["new_Scratch", "new_Flags"], ) - print(removed) + print(removed) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._delete_columns( tablename, @@ -563,7 +583,7 @@ def delete_columns( # File upload def upload_file( self, - logical_name: str, + tablename: str, record_id: str, file_name_attribute: str, path: str, @@ -574,8 +594,8 @@ def upload_file( """ Upload a file to a Dataverse file column. - :param logical_name: Singular logical table name, e.g. ``"account"``. - :type logical_name: str + :param tablename: Table name, e.g. ``"account"`` or ``"new_customtable"``. + :type tablename: str :param record_id: GUID of the target record. :type record_id: str :param file_name_attribute: Logical name of the file column attribute. 
@@ -606,7 +626,7 @@ def upload_file( Upload a PDF file:: client.upload_file( - logical_name="account", + tablename="account", record_id=account_id, file_name_attribute="new_contract", path="/path/to/contract.pdf", @@ -616,7 +636,7 @@ def upload_file( Upload with auto mode selection:: client.upload_file( - logical_name="email", + tablename="email", record_id=email_id, file_name_attribute="new_attachment", path="/path/to/large_file.zip", @@ -624,7 +644,7 @@ def upload_file( ) """ od = self._get_odata() - entity_set = od._entity_set_from_logical(logical_name) + entity_set = od._entity_set_from_logical(tablename) od.upload_file( entity_set, record_id, diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index c2880bd..94663be 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -32,6 +32,32 @@ def _escape_odata_quotes(value: str) -> str: """Escape single quotes for OData queries (by doubling them).""" return value.replace("'", "''") + @staticmethod + def _normalize_cache_key(tablename: str) -> str: + """Normalize tablename to lowercase for case-insensitive cache keys.""" + return tablename.lower() if isinstance(tablename, str) else "" + + @staticmethod + def _lowercase_keys(record: Dict[str, Any]) -> Dict[str, Any]: + """Convert all dictionary keys to lowercase for case-insensitive column names. + + Dataverse LogicalNames for attributes are stored lowercase, but users may + provide PascalCase names (matching SchemaName). This normalizes the input. + """ + if not isinstance(record, dict): + return record + return {k.lower() if isinstance(k, str) else k: v for k, v in record.items()} + + @staticmethod + def _lowercase_list(items: Optional[List[str]]) -> Optional[List[str]]: + """Convert all strings in a list to lowercase for case-insensitive column names. + + Used for $select, $orderby, $expand parameters where column names must be lowercase. 
+ """ + if not items: + return items + return [item.lower() if isinstance(item, str) else item for item in items] + def __init__( self, auth, @@ -49,11 +75,11 @@ def __init__( backoff=self.config.http_backoff, timeout=self.config.http_timeout, ) - # Cache: logical name -> entity set name (plural) resolved from metadata + # Cache: normalized tablename (lowercase) -> entity set name (plural) resolved from metadata self._logical_to_entityset_cache: dict[str, str] = {} - # Cache: logical name -> primary id attribute (e.g. accountid) + # Cache: normalized tablename (lowercase) -> primary id attribute (e.g. accountid) self._logical_primaryid_cache: dict[str, str] = {} - # Picklist label cache: (logical_name, attribute_logical) -> {'map': {...}, 'ts': epoch_seconds} + # Picklist label cache: (normalized_tablename, normalized_attribute) -> {'map': {...}, 'ts': epoch_seconds} self._picklist_label_cache = {} self._picklist_cache_ttl_seconds = 3600 # 1 hour TTL @@ -133,15 +159,15 @@ def _request(self, method: str, url: str, *, expected: tuple[int, ...] = (200, 2 ) # --- CRUD Internal functions --- - def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> str: + def _create(self, entity_set: str, tablename: str, record: Dict[str, Any]) -> str: """Create a single record and return its GUID. Parameters ------- entity_set : str Resolved entity set (plural) name. - logical_name : str - Singular logical entity name. + tablename : str + Table name. record : dict[str, Any] Attribute payload mapped by logical column names. @@ -155,7 +181,9 @@ def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> Relies on OData-EntityId (canonical) or Location header. No response body parsing is performed. Raises RuntimeError if neither header contains a GUID. 
""" - record = self._convert_labels_to_ints(logical_name, record) + # Lowercase all keys to match Dataverse LogicalName expectations + record = self._lowercase_keys(record) + record = self._convert_labels_to_ints(tablename, record) url = f"{self.api}/{entity_set}" r = self._request("post", url, json=record) @@ -174,21 +202,21 @@ def _create(self, entity_set: str, logical_name: str, record: Dict[str, Any]) -> f"Create response missing GUID in OData-EntityId/Location headers (status={getattr(r,'status_code', '?')}). Headers: {header_keys}" ) - def _create_multiple(self, entity_set: str, logical_name: str, records: List[Dict[str, Any]]) -> List[str]: + def _create_multiple(self, entity_set: str, tablename: str, records: List[Dict[str, Any]]) -> List[str]: """Create multiple records using the collection-bound CreateMultiple action. Parameters ---------- entity_set : str Resolved entity set (plural) name. - logical_name : str - Singular logical entity name. + tablename : str + Table name. records : list[dict[str, Any]] Payloads mapped by logical attribute names. Multi-create logical name resolution ------------------------------------ - - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.``. + - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.``. - If all payloads already include ``@odata.type`` no modification occurs. 
Returns @@ -199,9 +227,13 @@ def _create_multiple(self, entity_set: str, logical_name: str, records: List[Dic if not all(isinstance(r, dict) for r in records): raise TypeError("All items for multi-create must be dicts") need_logical = any("@odata.type" not in r for r in records) + # @odata.type uses LogicalName (lowercase) + logical_name = tablename.lower() enriched: List[Dict[str, Any]] = [] for r in records: - r = self._convert_labels_to_ints(logical_name, r) + # Lowercase all keys to match Dataverse LogicalName expectations + r = self._lowercase_keys(r) + r = self._convert_labels_to_ints(tablename, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -239,27 +271,28 @@ def _create_multiple(self, entity_set: str, logical_name: str, records: List[Dic return [] # --- Derived helpers for high-level client ergonomics --- - def _primary_id_attr(self, logical_name: str) -> str: + def _primary_id_attr(self, tablename: str) -> str: """Return primary key attribute using metadata; error if unavailable.""" - pid = self._logical_primaryid_cache.get(logical_name) + cache_key = self._normalize_cache_key(tablename) + pid = self._logical_primaryid_cache.get(cache_key) if pid: return pid - # Resolve metadata (populates _logical_primaryid_cache or raises if logical unknown) - self._entity_set_from_logical(logical_name) - pid2 = self._logical_primaryid_cache.get(logical_name) + # Resolve metadata (populates _logical_primaryid_cache or raises if tablename unknown) + self._entity_set_from_logical(tablename) + pid2 = self._logical_primaryid_cache.get(cache_key) if pid2: return pid2 raise RuntimeError( - f"PrimaryIdAttribute not resolved for logical name '{logical_name}'. Metadata did not include PrimaryIdAttribute." + f"PrimaryIdAttribute not resolved for tablename '{tablename}'. Metadata did not include PrimaryIdAttribute." 
) - def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def _update_by_ids(self, tablename: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """Update many records by GUID list using UpdateMultiple under the hood. Parameters ---------- - logical_name : str - Logical name (singular). + tablename : str + Table name. ids : list[str] GUIDs of target records. changes : dict | list[dict] @@ -269,11 +302,11 @@ def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[ raise TypeError("ids must be list[str]") if not ids: return None - pk_attr = self._primary_id_attr(logical_name) - entity_set = self._entity_set_from_logical(logical_name) + pk_attr = self._primary_id_attr(tablename) + entity_set = self._entity_set_from_logical(tablename) if isinstance(changes, dict): batch = [{pk_attr: rid, **changes} for rid in ids] - self._update_multiple(entity_set, logical_name, batch) + self._update_multiple(entity_set, tablename, batch) return None if not isinstance(changes, list): raise TypeError("changes must be dict or list[dict]") @@ -284,12 +317,12 @@ def _update_by_ids(self, logical_name: str, ids: List[str], changes: Union[Dict[ if not isinstance(patch, dict): raise TypeError("Each patch must be a dict") batch.append({pk_attr: rid, **patch}) - self._update_multiple(entity_set, logical_name, batch) + self._update_multiple(entity_set, tablename, batch) return None def _delete_multiple( self, - logical_name: str, + tablename: str, ids: List[str], ) -> Optional[str]: """Delete many records by GUID list. 
@@ -301,9 +334,12 @@ def _delete_multiple( return None value_objects = [{"Value": rid, "Type": "System.Guid"} for rid in targets] - pk_attr = self._primary_id_attr(logical_name) + pk_attr = self._primary_id_attr(tablename) timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds").replace("+00:00", "Z") - job_label = f"Bulk delete {logical_name} records @ {timestamp}" + job_label = f"Bulk delete {tablename} records @ {timestamp}" + + # EntityName must use lowercase LogicalName + logical_name = tablename.lower() query = { "@odata.type": "Microsoft.Dynamics.CRM.QueryExpression", @@ -365,13 +401,13 @@ def esc(match): return f"({k})" return f"({k})" - def _update(self, logical_name: str, key: str, data: Dict[str, Any]) -> None: + def _update(self, tablename: str, key: str, data: Dict[str, Any]) -> None: """Update an existing record. Parameters ---------- - logical_name : str - Logical (singular) entity name. + tablename : str + Table name. key : str Record GUID (with or without parentheses) or alternate key. data : dict @@ -381,24 +417,26 @@ def _update(self, logical_name: str, key: str, data: Dict[str, Any]) -> None: ------- None """ - data = self._convert_labels_to_ints(logical_name, data) - entity_set = self._entity_set_from_logical(logical_name) + # Lowercase all keys to match Dataverse LogicalName expectations + data = self._lowercase_keys(data) + data = self._convert_labels_to_ints(tablename, data) + entity_set = self._entity_set_from_logical(tablename) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("patch", url, headers={"If-Match": "*"}, json=data) - def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dict[str, Any]]) -> None: + def _update_multiple(self, entity_set: str, tablename: str, records: List[Dict[str, Any]]) -> None: """Bulk update existing records via the collection-bound UpdateMultiple action. Parameters ---------- entity_set : str Resolved entity set name. 
- logical_name : str - Logical (singular) name, e.g. "account". + tablename : str + Table name, e.g. "account". records : list[dict] Each dict must include the real primary key attribute for the entity (e.g. ``accountid``) and one or more - fields to update. If ``@odata.type`` is omitted in any payload, the logical name is resolved once and - stamped into those payloads as ``Microsoft.Dynamics.CRM.`` (same behaviour as bulk create). + fields to update. If ``@odata.type`` is omitted in any payload, the table name is resolved once and + stamped into those payloads as ``Microsoft.Dynamics.CRM.`` (same behaviour as bulk create). Behaviour --------- @@ -421,9 +459,13 @@ def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dic # Determine whether we need logical name resolution (@odata.type missing in any payload) need_logical = any("@odata.type" not in r for r in records) + # @odata.type uses LogicalName (lowercase) + logical_name = tablename.lower() enriched: List[Dict[str, Any]] = [] for r in records: - r = self._convert_labels_to_ints(logical_name, r) + # Lowercase all keys to match Dataverse LogicalName expectations + r = self._lowercase_keys(r) + r = self._convert_labels_to_ints(tablename, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -437,19 +479,19 @@ def _update_multiple(self, entity_set: str, logical_name: str, records: List[Dic # Intentionally ignore response content: no stable contract for IDs across environments. 
return None - def _delete(self, logical_name: str, key: str) -> None: + def _delete(self, tablename: str, key: str) -> None: """Delete a record by GUID or alternate key.""" - entity_set = self._entity_set_from_logical(logical_name) + entity_set = self._entity_set_from_logical(tablename) url = f"{self.api}/{entity_set}{self._format_key(key)}" self._request("delete", url, headers={"If-Match": "*"}) - def _get(self, logical_name: str, key: str, select: Optional[str] = None) -> Dict[str, Any]: + def _get(self, tablename: str, key: str, select: Optional[str] = None) -> Dict[str, Any]: """Retrieve a single record. Parameters ---------- - logical_name : str - Logical (singular) name. + tablename : str + Table name. key : str Record GUID (with or without parentheses) or alternate key syntax. select : str | None @@ -457,15 +499,16 @@ def _get(self, logical_name: str, key: str, select: Optional[str] = None) -> Dic """ params = {} if select: - params["$select"] = select - entity_set = self._entity_set_from_logical(logical_name) + # Lowercase column names for case-insensitive matching + params["$select"] = select.lower() + entity_set = self._entity_set_from_logical(tablename) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("get", url, params=params) return r.json() def _get_multiple( self, - logical_name: str, + tablename: str, select: Optional[List[str]] = None, filter: Optional[str] = None, orderby: Optional[List[str]] = None, @@ -477,8 +520,8 @@ def _get_multiple( Parameters ---------- - logical_name : str - Logical (singular) entity name. + tablename : str + Table name. select : list[str] | None Columns to select; joined with commas into $select. 
filter : str | None @@ -512,17 +555,21 @@ def _do_request(url: str, *, params: Optional[Dict[str, Any]] = None) -> Dict[st except ValueError: return {} - entity_set = self._entity_set_from_logical(logical_name) + entity_set = self._entity_set_from_logical(tablename) base_url = f"{self.api}/{entity_set}" params: Dict[str, Any] = {} if select: - params["$select"] = ",".join(select) + # Lowercase column names for case-insensitive matching + params["$select"] = ",".join(self._lowercase_list(select)) if filter: + # Filter is passed as-is; users must use lowercase column names in filter expressions params["$filter"] = filter if orderby: - params["$orderby"] = ",".join(orderby) + # Lowercase column names for case-insensitive matching + params["$orderby"] = ",".join(self._lowercase_list(orderby)) if expand: - params["$expand"] = ",".join(expand) + # Lowercase navigation property names for case-insensitive matching + params["$expand"] = ",".join(self._lowercase_list(expand)) if top is not None: params["$top"] = int(top) @@ -620,15 +667,20 @@ def _extract_logical_table(sql: str) -> str: def _entity_set_from_logical(self, logical: str) -> str: """Resolve entity set name (plural) from a logical (singular) name using metadata. - Caches results for subsequent SQL queries. + Caches results for subsequent queries. Case-insensitive. 
""" if not logical: raise ValueError("logical name required") - cached = self._logical_to_entityset_cache.get(logical) + + # Use normalized (lowercase) key for cache lookup + cache_key = self._normalize_cache_key(logical) + cached = self._logical_to_entityset_cache.get(cache_key) if cached: return cached url = f"{self.api}/EntityDefinitions" - logical_escaped = self._escape_odata_quotes(logical) + # LogicalName in Dataverse is stored in lowercase, so we need to lowercase for the filter + logical_lower = logical.lower() + logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "LogicalName,EntitySetName,PrimaryIdAttribute", "$filter": f"LogicalName eq '{logical_escaped}'", @@ -652,10 +704,10 @@ def _entity_set_from_logical(self, logical: str) -> str: f"Metadata response missing EntitySetName for logical '{logical}'.", subcode=ec.METADATA_ENTITYSET_NAME_MISSING, ) - self._logical_to_entityset_cache[logical] = es + self._logical_to_entityset_cache[cache_key] = es primary_id_attr = md.get("PrimaryIdAttribute") if isinstance(primary_id_attr, str) and primary_id_attr: - self._logical_primaryid_cache[logical] = primary_id_attr + self._logical_primaryid_cache[cache_key] = primary_id_attr return es # ---------------------- Table metadata helpers ---------------------- @@ -676,22 +728,24 @@ def _to_pascal(self, name: str) -> str: parts = re.split(r"[^A-Za-z0-9]+", name) return "".join(p[:1].upper() + p[1:] for p in parts if p) - def _normalize_entity_schema(self, tablename: str) -> str: - if "_" in tablename: - return tablename - return f"new_{self._to_pascal(tablename)}" - - def _get_entity_by_schema( + def _get_entity_by_logical_name( self, - schema_name: str, + logical_name: str, headers: Optional[Dict[str, str]] = None, ) -> Optional[Dict[str, Any]]: + """Get entity metadata by LogicalName. Case-insensitive. + + Note: LogicalName is stored lowercase in Dataverse, so we lowercase the input + for case-insensitive matching. 
The response includes SchemaName, LogicalName, + EntitySetName, and MetadataId. + """ url = f"{self.api}/EntityDefinitions" - # Escape single quotes in schema name - schema_escaped = self._escape_odata_quotes(schema_name) + # LogicalName is stored lowercase, so we lowercase the input for lookup + logical_lower = logical_name.lower() + logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "MetadataId,LogicalName,SchemaName,EntitySetName", - "$filter": f"SchemaName eq '{schema_escaped}'", + "$filter": f"LogicalName eq '{logical_escaped}'", } r = self._request("get", url, params=params, headers=headers) items = r.json().get("value", []) @@ -721,7 +775,7 @@ def _create_entity( if solution_unique_name: params = {"SolutionUniqueName": solution_unique_name} self._request("post", url, json=payload, params=params) - ent = self._get_entity_by_schema( + ent = self._get_entity_by_logical_name( schema_name, headers={"Consistency": "Strong"}, ) @@ -735,16 +789,6 @@ def _create_entity( ) return ent - def _normalize_attribute_schema(self, entity_schema: str, column_name: str) -> str: - # Use same publisher prefix segment as entity_schema if present; else default to 'new_'. 
- if not isinstance(column_name, str) or not column_name.strip(): - raise ValueError("column_name must be a non-empty string") - publisher = entity_schema.split("_", 1)[0] if "_" in entity_schema else "new" - expected_prefix = f"{publisher}_" - if column_name.lower().startswith(expected_prefix.lower()): - return column_name - return f"{publisher}_{self._to_pascal(column_name)}" - def _get_attribute_metadata( self, entity_metadata_id: str, @@ -903,7 +947,7 @@ def _normalize_picklist_label(self, label: str) -> str: norm = re.sub(r"\s+", " ", norm).strip().lower() return norm - def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[str, int]]: + def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str, int]]: """Build or return cached mapping of normalized label -> value for a picklist attribute. Returns empty dict if attribute is not a picklist or has no options. Returns None only @@ -913,20 +957,22 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ ----- - This method calls the Web API twice per attribute so it could have perf impact when there are lots of columns on the entity. 
""" - if not logical_name or not attr_logical: + if not tablename or not attr_logical: return None - cache_key = (logical_name, attr_logical.lower()) + # Normalize cache key for case-insensitive lookups + cache_key = (self._normalize_cache_key(tablename), self._normalize_cache_key(attr_logical)) now = time.time() entry = self._picklist_label_cache.get(cache_key) if isinstance(entry, dict) and 'map' in entry and (now - entry.get('ts', 0)) < self._picklist_cache_ttl_seconds: return entry['map'] - attr_esc = self._escape_odata_quotes(attr_logical) - logical_esc = self._escape_odata_quotes(logical_name) + # LogicalNames in Dataverse are stored in lowercase, so we need to lowercase for filters + attr_esc = self._escape_odata_quotes(attr_logical.lower()) + tablename_esc = self._escape_odata_quotes(tablename.lower()) # Step 1: lightweight fetch (no expand) to determine attribute type url_type = ( - f"{self.api}/EntityDefinitions(LogicalName='{logical_esc}')/Attributes" + f"{self.api}/EntityDefinitions(LogicalName='{tablename_esc}')/Attributes" f"?$filter=LogicalName eq '{attr_esc}'&$select=LogicalName,AttributeType" ) # Retry up to 3 times on 404 (new or not-yet-published attribute metadata). If still 404, raise. @@ -942,7 +988,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ time.sleep(0.4 * (2 ** attempt)) continue raise RuntimeError( - f"Picklist attribute metadata not found after retries: entity='{logical_name}' attribute='{attr_logical}' (404)" + f"Picklist attribute metadata not found after retries: entity='{tablename}' attribute='{attr_logical}' (404)" ) from err raise if r_type is None: @@ -960,7 +1006,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ # Step 2: fetch with expand only now that we know it's a picklist # Need to cast to the derived PicklistAttributeMetadata type; OptionSet is not a nav on base AttributeMetadata. 
cast_url = ( - f"{self.api}/EntityDefinitions(LogicalName='{logical_esc}')/Attributes(LogicalName='{attr_esc}')/" + f"{self.api}/EntityDefinitions(LogicalName='{tablename_esc}')/Attributes(LogicalName='{attr_esc}')/" "Microsoft.Dynamics.CRM.PicklistAttributeMetadata?$select=LogicalName&$expand=OptionSet($select=Options)" ) # Step 2 fetch with retries: expanded OptionSet (cast form first) @@ -975,7 +1021,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ time.sleep(0.4 * (2 ** attempt)) # 0.4s, 0.8s continue raise RuntimeError( - f"Picklist OptionSet metadata not found after retries: entity='{logical_name}' attribute='{attr_logical}' (404)" + f"Picklist OptionSet metadata not found after retries: entity='{tablename}' attribute='{attr_logical}' (404)" ) from err raise if r_opts is None: @@ -1013,7 +1059,7 @@ def _optionset_map(self, logical_name: str, attr_logical: str) -> Optional[Dict[ self._picklist_label_cache[cache_key] = {'map': {}, 'ts': now} return {} - def _convert_labels_to_ints(self, logical_name: str, record: Dict[str, Any]) -> Dict[str, Any]: + def _convert_labels_to_ints(self, tablename: str, record: Dict[str, Any]) -> Dict[str, Any]: """Return a copy of record with any labels converted to option ints. Heuristic: For each string value, attempt to resolve against picklist metadata. @@ -1023,7 +1069,7 @@ def _convert_labels_to_ints(self, logical_name: str, record: Dict[str, Any]) -> for k, v in list(out.items()): if not isinstance(v, str) or not v.strip(): continue - mapping = self._optionset_map(logical_name, k) + mapping = self._optionset_map(tablename, k) if not mapping: continue norm = self._normalize_picklist_label(v) @@ -1123,7 +1169,7 @@ def _get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: dict | None Metadata summary or ``None`` if not found. 
""" - ent = self._get_entity_by_schema(tablename) + ent = self._get_entity_by_logical_name(tablename) if not ent: return None return { @@ -1144,11 +1190,11 @@ def _list_tables(self) -> List[Dict[str, Any]]: return r.json().get("value", []) def _delete_table(self, tablename: str) -> None: - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + """Delete a table by SchemaName. Case-insensitive.""" + ent = self._get_entity_by_logical_name(tablename) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{tablename}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) metadata_id = ent["MetadataId"] @@ -1160,29 +1206,37 @@ def _create_table( tablename: str, schema: Dict[str, Any], solution_unique_name: Optional[str] = None, + primary_column_name: Optional[str] = None, ) -> Dict[str, Any]: - # Accept a friendly name and construct a default schema under 'new_'. - # If a full SchemaName is passed (contains '_'), use as-is. - entity_schema = self._normalize_entity_schema(tablename) - - ent = self._get_entity_by_schema(entity_schema) + """Create a table using tablename as SchemaName directly. + + The server will determine the LogicalName automatically (usually lowercased SchemaName). + """ + # Check if table already exists (case-insensitive) + ent = self._get_entity_by_logical_name(tablename) if ent: raise MetadataError( - f"Table '{entity_schema}' already exists.", + f"Table '{tablename}' already exists.", subcode=ec.METADATA_TABLE_ALREADY_EXISTS, ) created_cols: List[str] = [] - primary_attr_schema = "new_Name" if "_" not in entity_schema else f"{entity_schema.split('_',1)[0]}_Name" + + # Use provided primary column name, or derive from tablename prefix (e.g., "new_Product" -> "new_Name"). + # If no prefix detected, default to "new_Name"; server will validate overall table schema. 
+ if primary_column_name: + primary_attr_schema = primary_column_name + else: + primary_attr_schema = f"{tablename.split('_',1)[0]}_Name" if "_" in tablename else "new_Name" + attributes: List[Dict[str, Any]] = [] attributes.append(self._attribute_payload(primary_attr_schema, "string", is_primary_name=True)) for col_name, dtype in schema.items(): - attr_schema = self._normalize_attribute_schema(entity_schema, col_name) - payload = self._attribute_payload(attr_schema, dtype) + payload = self._attribute_payload(col_name, dtype) if not payload: raise ValueError(f"Unsupported column type '{dtype}' for '{col_name}'.") attributes.append(payload) - created_cols.append(attr_schema) + created_cols.append(col_name) if solution_unique_name is not None: if not isinstance(solution_unique_name, str): @@ -1191,14 +1245,14 @@ def _create_table( raise ValueError("solution_unique_name cannot be empty") metadata = self._create_entity( - entity_schema, - tablename, - attributes, - solution_unique_name, + schema_name=tablename, + display_name=tablename, + attributes=attributes, + solution_unique_name=solution_unique_name, ) return { - "entity_schema": entity_schema, + "entity_schema": tablename, "entity_logical_name": metadata.get("LogicalName"), "entity_set_name": metadata.get("EntitySetName"), "metadata_id": metadata.get("MetadataId"), @@ -1210,30 +1264,32 @@ def _create_columns( tablename: str, columns: Dict[str, Any], ) -> List[str]: + """Create columns on an existing table. 
Case-insensitive table lookup.""" if not isinstance(columns, dict) or not columns: raise TypeError("columns must be a non-empty dict[name -> type]") - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + + ent = self._get_entity_by_logical_name(tablename) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{tablename}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) + # Use the actual SchemaName from the entity metadata + entity_schema = ent.get("SchemaName") or tablename metadata_id = ent.get("MetadataId") created: List[str] = [] needs_picklist_flush = False for column_name, column_type in columns.items(): - schema_name = self._normalize_attribute_schema(entity_schema, column_name) - payload = self._attribute_payload(schema_name, column_type) + payload = self._attribute_payload(column_name, column_type) if not payload: - raise ValueError(f"Unsupported column type '{column_type}' for '{schema_name}'.") + raise ValueError(f"Unsupported column type '{column_type}' for '{column_name}'.") url = f"{self.api}/EntityDefinitions({metadata_id})/Attributes" self._request("post", url, json=payload) - created.append(schema_name) + created.append(column_name) if "OptionSet" in payload: needs_picklist_flush = True @@ -1248,6 +1304,7 @@ def _delete_columns( tablename: str, columns: Union[str, List[str]], ) -> List[str]: + """Delete columns from an existing table. 
Case-insensitive table lookup.""" if isinstance(columns, str): names = [columns] elif isinstance(columns, list): @@ -1259,31 +1316,31 @@ def _delete_columns( if not isinstance(name, str) or not name.strip(): raise ValueError("column names must be non-empty strings") - entity_schema = self._normalize_entity_schema(tablename) - ent = self._get_entity_by_schema(entity_schema) + ent = self._get_entity_by_logical_name(tablename) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{entity_schema}' not found.", + f"Table '{tablename}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) + # Use the actual SchemaName from the entity metadata + entity_schema = ent.get("SchemaName") or tablename metadata_id = ent.get("MetadataId") deleted: List[str] = [] needs_picklist_flush = False for column_name in names: - schema_name = self._normalize_attribute_schema(entity_schema, column_name) - attr_meta = self._get_attribute_metadata(metadata_id, schema_name, extra_select="@odata.type,AttributeType") + attr_meta = self._get_attribute_metadata(metadata_id, column_name, extra_select="@odata.type,AttributeType") if not attr_meta: raise MetadataError( - f"Column '{schema_name}' not found on table '{entity_schema}'.", + f"Column '{column_name}' not found on table '{entity_schema}'.", subcode=ec.METADATA_COLUMN_NOT_FOUND, ) attr_metadata_id = attr_meta.get("MetadataId") if not attr_metadata_id: raise RuntimeError( - f"Metadata incomplete for column '{schema_name}' (missing MetadataId)." + f"Metadata incomplete for column '{column_name}' (missing MetadataId)." 
) attr_url = f"{self.api}/EntityDefinitions({metadata_id})/Attributes({attr_metadata_id})" @@ -1295,7 +1352,7 @@ def _delete_columns( if "picklist" in attr_type_l or "optionset" in attr_type_l: needs_picklist_flush = True - deleted.append(schema_name) + deleted.append(column_name) if needs_picklist_flush: self._flush_cache("picklist") From d92ecf396eb1f38fc58fa683832c53a35c6a0822 Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 03:42:03 -0800 Subject: [PATCH 2/9] Update README --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 08d1458..62d5712 100644 --- a/README.md +++ b/README.md @@ -197,9 +197,23 @@ table_info = client.create_table("new_Product", { "new_Active": "bool" }) +# Create with custom primary column name and solution assignment +table_info = client.create_table( + tablename="new_Product", + schema={ + "new_Code": "string", + "new_Price": "decimal" + }, + solution_unique_name="MyPublisher", # Optional: add to specific solution + primary_column_name="new_ProductName" # Optional: custom primary column (default is "{prefix}_Name") +) + # Add columns to existing table (columns must include publisher prefix) client.create_columns("new_Product", {"new_Category": "string"}) +# Remove columns +client.delete_columns("new_Product", ["new_Category"]) + # Clean up client.delete_table("new_Product") ``` From ca82b8cfbe4cbd195d90f9a3819f9aa4f738d10c Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 14:43:35 -0800 Subject: [PATCH 3/9] naming / doc updates --- README.md | 34 +++- examples/advanced/file_upload.py | 2 +- examples/basic/functional_testing.py | 12 +- src/PowerPlatform/Dataverse/client.py | 150 ++++++++------ src/PowerPlatform/Dataverse/data/odata.py | 232 +++++++++++----------- 5 files changed, 234 insertions(+), 196 deletions(-) diff --git a/README.md b/README.md index 62d5712..c9c0b39 100644 --- a/README.md +++ b/README.md @@ -104,7 +104,7 @@ The SDK provides a 
simple, pythonic interface for Dataverse operations: | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic information | -| **Publisher Prefixes** | Custom columns require publisher prefix (e.g., `"new_Title"` not `"Title"`) | +| **Customization prefix values** | Custom tables and columns require a customization prefix values to be included for all operations (e.g., `"new_Title"`, not `"Title"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | ## Examples @@ -176,21 +176,39 @@ for record in results: print(record["name"]) # OData query with paging +# Note: filter and expand parameters require exact casing pages = client.get( "account", - select=["accountid", "name"], - filter="statecode eq 0", + select=["accountid", "name"], # select is case-insensitive (automatically lowercased) + filter="statecode eq 0", # filter must use lowercase logical names (not transformed) top=100 ) for page in pages: for record in page: print(record["name"]) + +# Query with navigation property expansion (case-sensitive!) 
+pages = client.get( + "account", + select=["name"], + expand=["primarycontactid"], # Navigation property names are case-sensitive + filter="statecode eq 0" # Column names must be lowercase logical names +) +for page in pages: + for account in page: + contact = account.get("primarycontactid", {}) + print(f"{account['name']} - Contact: {contact.get('fullname', 'N/A')}") ``` +> **Important**: When using `filter` and `expand` parameters: +> - **`filter`**: Column names must use exact lowercase logical names (e.g., `"statecode eq 0"`, not `"StateCode eq 0"`) +> - **`expand`**: Navigation property names are case-sensitive and must match the exact server names +> - **`select`** and **`orderby`**: Case-insensitive; automatically converted to lowercase + ### Table management ```python -# Create a custom table with publisher-prefixed columns +# Create a custom table, including the customization prefix value in the schema names for the table and columns. table_info = client.create_table("new_Product", { "new_Code": "string", "new_Price": "decimal", @@ -199,16 +217,16 @@ table_info = client.create_table("new_Product", { # Create with custom primary column name and solution assignment table_info = client.create_table( - tablename="new_Product", + table_schema_name="new_Product", schema={ "new_Code": "string", "new_Price": "decimal" }, solution_unique_name="MyPublisher", # Optional: add to specific solution - primary_column_name="new_ProductName" # Optional: custom primary column (default is "{prefix}_Name") + primary_column_schema_name="new_ProductName" # Optional: custom primary column (default is "{customization prefix value}_Name") ) -# Add columns to existing table (columns must include publisher prefix) +# Add columns to existing table (columns must include customization prefix value) client.create_columns("new_Product", {"new_Category": "string"}) # Remove columns @@ -218,7 +236,7 @@ client.delete_columns("new_Product", ["new_Category"]) 
client.delete_table("new_Product") ``` -> **Important**: All custom column names must include the publisher prefix (e.g., `"new_"`). +> **Important**: All custom column names must include the customization prefix value (e.g., `"new_"`). > This ensures explicit, predictable naming and aligns with Dataverse metadata requirements. ### File operations diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 5864de1..e6aa454 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -166,7 +166,7 @@ def backoff(op, *, delays=(0,2,5,10), retry_status=(400,403,404,409,412,429,500, # --------------------------- Table ensure --------------------------- TABLE_SCHEMA_NAME = "new_FileSample" -# If user wants new publisher prefix / naming, adjust above. +# If user wants new customization prefix value / naming, adjust above. def ensure_table(): # Check by schema diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index 07512ce..f16d785 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -78,24 +78,24 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]: print("\n📋 Test Table Setup") print("=" * 50) - tablename = "test_TestSDKFunctionality" + table_schema_name = "test_TestSDKFunctionality" try: # Check if table already exists - existing_table = client.get_table_info(tablename) + existing_table = client.get_table_info(table_schema_name) if existing_table: - print(f"✅ Test table '{tablename}' already exists") + print(f"✅ Test table '{table_schema_name}' already exists") return existing_table except Exception: - print(f"📝 Table '{tablename}' not found, creating...") + print(f"📝 Table '{table_schema_name}' not found, creating...") try: print("🔨 Creating new test table...") # Create the test table with various field types table_info = client.create_table( - tablename, - primary_column_name="test_name", + table_schema_name, + 
primary_column_schema_name="test_name", schema= { "test_description": "string", # Description field diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index ceead14..4758f4b 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -101,12 +101,12 @@ def _get_odata(self) -> ODataClient: return self._odata # ---------------- Unified CRUD: create/update/delete ---------------- - def create(self, tablename: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: + def create(self, table_schema_name: str, records: Union[Dict[str, Any], List[Dict[str, Any]]]) -> List[str]: """ Create one or more records by table name. - :param tablename: Table name (e.g. ``"account"``, ``"contact"``, or ``"new_customtable"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"account"``, ``"contact"``, or ``"new_customtable"``). + :type table_schema_name: str :param records: A single record dictionary or a list of record dictionaries. Each dictionary should contain attribute logical names as keys. 
:type records: dict or list[dict] @@ -134,21 +134,21 @@ def create(self, tablename: str, records: Union[Dict[str, Any], List[Dict[str, A print(f"Created {len(ids)} accounts") """ od = self._get_odata() - entity_set = od._entity_set_from_logical(tablename) + entity_set = od._entity_set_from_logical(table_schema_name) if isinstance(records, dict): - rid = od._create(entity_set, tablename, records) + rid = od._create(entity_set, table_schema_name, records) # _create returns str on single input if not isinstance(rid, str): raise TypeError("_create (single) did not return GUID string") return [rid] if isinstance(records, list): - ids = od._create_multiple(entity_set, tablename, records) + ids = od._create_multiple(entity_set, table_schema_name, records) if not isinstance(ids, list) or not all(isinstance(x, str) for x in ids): raise TypeError("_create (multi) did not return list[str]") return ids raise TypeError("records must be dict or list[dict]") - def update(self, tablename: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def update(self, table_schema_name: str, ids: Union[str, List[str]], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """ Update one or more records. @@ -158,8 +158,8 @@ def update(self, tablename: str, ids: Union[str, List[str]], changes: Union[Dict 2. Broadcast update: ``update("account", [id1, id2], {"status": 1})`` - applies same changes to all IDs 3. Paired updates: ``update("account", [id1, id2], [changes1, changes2])`` - one-to-one mapping - :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_customtable"``). + :type table_schema_name: str :param ids: Single GUID string or list of GUID strings to update. 
:type ids: str or list[str] :param changes: Dictionary of changes for single/broadcast mode, or list of dictionaries @@ -197,24 +197,24 @@ def update(self, tablename: str, ids: Union[str, List[str]], changes: Union[Dict if isinstance(ids, str): if not isinstance(changes, dict): raise TypeError("For single id, changes must be a dict") - od._update(tablename, ids, changes) # discard representation + od._update(table_schema_name, ids, changes) # discard representation return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") - od._update_by_ids(tablename, ids, changes) + od._update_by_ids(table_schema_name, ids, changes) return None def delete( self, - tablename: str, + table_schema_name: str, ids: Union[str, List[str]], use_bulk_delete: bool = True, ) -> Optional[str]: """ Delete one or more records by GUID. - :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_customtable"``). + :type table_schema_name: str :param ids: Single GUID string or list of GUID strings to delete. 
:type ids: str or list[str] :param use_bulk_delete: When ``True`` (default) and ``ids`` is a list, execute the BulkDelete action and @@ -238,7 +238,7 @@ def delete( """ od = self._get_odata() if isinstance(ids, str): - od._delete(tablename, ids) + od._delete(table_schema_name, ids) return None if not isinstance(ids, list): raise TypeError("ids must be str or list[str]") @@ -247,14 +247,14 @@ def delete( if not all(isinstance(rid, str) for rid in ids): raise TypeError("ids must contain string GUIDs") if use_bulk_delete: - return od._delete_multiple(tablename, ids) + return od._delete_multiple(table_schema_name, ids) for rid in ids: - od._delete(tablename, rid) + od._delete(table_schema_name, rid) return None def get( self, - tablename: str, + table_schema_name: str, record_id: Optional[str] = None, select: Optional[List[str]] = None, filter: Optional[str] = None, @@ -269,24 +269,27 @@ def get( When ``record_id`` is provided, returns a single record dictionary. When ``record_id`` is None, returns a generator yielding batches of records. - :param tablename: Table name (e.g. ``"account"`` or ``"new_customtable"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_customtable"``). + :type table_schema_name: str :param record_id: Optional GUID to fetch a specific record. If None, queries multiple records. :type record_id: str or None :param select: Optional list of attribute logical names to retrieve. Column names are case-insensitive and automatically lowercased (e.g. ``["new_Title", "new_Amount"]`` becomes ``"new_title,new_amount"``). :type select: list[str] or None - :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"`` or - ``"new_quantity gt 5"``. **Column names in filter expressions must use lowercase - logical names** (e.g. ``"new_quantity"`` not ``"new_Quantity"``). + :param filter: Optional OData $filter expression as a string, e.g. ``"name eq 'Contoso'"`` or + ``"new_quantity gt 5"``. 
**IMPORTANT: Column names in filter expressions must use exact + lowercase logical names** (e.g. ``"new_quantity"`` not ``"new_Quantity"``). The filter + string is passed directly to the Dataverse Web API without transformation. :type filter: str or None :param orderby: Optional list of attributes to sort by, e.g. ``["name asc", "createdon desc"]``. Column names are automatically lowercased. :type orderby: list[str] or None :param top: Optional maximum number of records to return. :type top: int or None - :param expand: Optional list of navigation properties to expand. + :param expand: Optional list of navigation property names to expand, e.g. ``["primarycontactid"]``. + **IMPORTANT: Navigation property names are case-sensitive and must match the server-defined names exactly.** + These are NOT automatically transformed. Consult entity metadata for correct casing. :type expand: list[str] or None :param page_size: Optional number of records per page for pagination. :type page_size: int or None @@ -303,12 +306,27 @@ record = client.get("account", record_id=account_id, select=["name", "telephone1"]) print(record["name"]) - Query multiple records with filtering:: + Query multiple records with filtering (note: exact logical names in filter):: - for batch in client.get("account", filter="name eq 'Contoso'", select=["name"]): + for batch in client.get( + "account", + filter="statecode eq 0 and name eq 'Contoso'", # Must use exact logical names (lower-case) + select=["name", "telephone1"] + ): for account in batch: print(account["name"]) + Query with navigation property expansion (note: case-sensitive property name):: + + for batch in client.get( + "account", + select=["name"], + expand=["primarycontactid"], # Case-sensitive!
Check metadata for exact name + filter="statecode eq 0" + ): + for account in batch: + print(f"{account['name']} - Contact: {account.get('primarycontactid', {}).get('fullname')}") + Query with sorting and pagination:: for batch in client.get( @@ -324,12 +342,12 @@ def get( if not isinstance(record_id, str): raise TypeError("record_id must be str") return od._get( - tablename, + table_schema_name, record_id, select=select, ) return od._get_multiple( - tablename, + table_schema_name, select=select, filter=filter, orderby=orderby, @@ -377,12 +395,12 @@ def query_sql(self, sql: str): return self._get_odata()._query_sql(sql) # Table metadata helpers - def get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: + def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """ Get basic metadata for a table if it exists. - :param tablename: Table name (e.g. ``"new_SampleItem"`` or ``"account"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"new_SampleItem"`` or ``"account"``). + :type table_schema_name: str :return: Dictionary containing table metadata with keys ``entity_schema``, ``entity_logical_name``, ``entity_set_name``, and ``metadata_id``. @@ -397,22 +415,22 @@ def get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: print(f"Logical name: {info['entity_logical_name']}") print(f"Entity set: {info['entity_set_name']}") """ - return self._get_odata()._get_table_info(tablename) + return self._get_odata()._get_table_info(table_schema_name) def create_table( self, - tablename: str, + table_schema_name: str, schema: Dict[str, Any], solution_unique_name: Optional[str] = None, - primary_column_name: Optional[str] = None, + primary_column_schema_name: Optional[str] = None, ) -> Dict[str, Any]: """ Create a simple custom table with specified columns. - :param tablename: Table name with publisher prefix (e.g. ``"new_SampleItem"``). 
- :type tablename: str - :param schema: Dictionary mapping column names (with publisher prefix) to their types. - **All custom column names must include the publisher prefix** (e.g. ``"new_Title"``). + :param table_schema_name: Table schema name with customization prefix value (e.g. ``"new_SampleItem"``). + :type table_schema_name: str + :param schema: Dictionary mapping column names (with customization prefix value) to their types. + **All custom column names must include the customization prefix value** (e.g. ``"new_Title"``). Supported types: - Primitive types: ``"string"``, ``"int"``, ``"decimal"``, ``"float"``, ``"datetime"``, ``"bool"`` @@ -431,10 +449,10 @@ class ItemStatus(IntEnum): :param solution_unique_name: Optional solution unique name that should own the new table. When omitted the table is created in the default solution. :type solution_unique_name: str or None - :param primary_column_name: Optional primary name column schema name with publisher prefix + :param primary_column_schema_name: Optional primary name column schema name with customization prefix value (e.g. ``"new_ProductName"``). If not provided, defaults to ``"{prefix}_Name"`` derived - from the table's publisher prefix. - :type primary_column_name: str or None + from the table's customization prefix value. + :type primary_column_schema_name: str or None :return: Dictionary containing table metadata including ``entity_schema``, ``entity_set_name``, ``entity_logical_name``, ``metadata_id``, and ``columns_created``. 
@@ -452,7 +470,7 @@ class ItemStatus(IntEnum): INACTIVE = 2 schema = { - "new_Title": "string", # Note: includes 'new_' prefix + "new_Title": "string", # Note: includes 'new_' customization prefix value "new_Quantity": "int", "new_Price": "decimal", "new_Available": "bool", @@ -468,22 +486,22 @@ class ItemStatus(IntEnum): result = client.create_table( "new_Product", {"new_Price": "decimal"}, - primary_column_name="new_ProductName" + primary_column_schema_name="new_ProductName" ) """ return self._get_odata()._create_table( - tablename, + table_schema_name, schema, solution_unique_name, - primary_column_name, + primary_column_schema_name, ) - def delete_table(self, tablename: str) -> None: + def delete_table(self, table_schema_name: str) -> None: """ Delete a custom table by name. - :param tablename: Table name (e.g. ``"new_SampleItem"`` or ``"account"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"new_SampleItem"`` or ``"account"``). + :type table_schema_name: str :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If the table does not exist or deletion fails. @@ -496,7 +514,7 @@ def delete_table(self, tablename: str) -> None: client.delete_table("new_SampleItem") """ - self._get_odata()._delete_table(tablename) + self._get_odata()._delete_table(table_schema_name) def list_tables(self) -> list[str]: """ @@ -516,16 +534,16 @@ def list_tables(self) -> list[str]: def create_columns( self, - tablename: str, + table_schema_name: str, columns: Dict[str, Any], ) -> List[str]: """ Create one or more columns on an existing table using a schema-style mapping. - :param tablename: Table name (e.g. ``"new_SampleItem"``). - :type tablename: str - :param columns: Mapping of column schema names (with publisher prefix) to supported types. - **All custom column names must include the publisher prefix** (e.g. ``"new_Notes"``). + :param table_schema_name: Table schema name (e.g. ``"new_SampleItem"``). 
+ :type table_schema_name: str + :param columns: Mapping of column schema names (with customization prefix value) to supported types. + **All custom column names must include the customization prefix value** (e.g. ``"new_Notes"``). Primitive types include ``string``, ``int``, ``decimal``, ``float``, ``datetime``, and ``bool``. Enum subclasses (IntEnum preferred) generate a local option set and can specify localized labels via ``__labels__``. @@ -546,22 +564,22 @@ def create_columns( print(created) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._create_columns( - tablename, + table_schema_name, columns, ) def delete_columns( self, - tablename: str, + table_schema_name: str, columns: Union[str, List[str]], ) -> List[str]: """ Delete one or more columns from a table. - :param tablename: Table name (e.g. ``"new_SampleItem"``). - :type tablename: str + :param table_schema_name: Table schema name (e.g. ``"new_SampleItem"``). + :type table_schema_name: str :param columns: Column name or list of column schema names to remove. - **Must include publisher prefix** (e.g. ``"new_Scratch"``). Case-insensitive. + **Must include customization prefix value** (e.g. ``"new_Scratch"``). Case-insensitive. :type columns: str | list[str] :returns: Schema names for the columns that were removed. :rtype: list[str] @@ -576,14 +594,14 @@ def delete_columns( print(removed) # ['new_Scratch', 'new_Flags'] """ return self._get_odata()._delete_columns( - tablename, + table_schema_name, columns, ) # File upload def upload_file( self, - tablename: str, + table_schema_name: str, record_id: str, file_name_attribute: str, path: str, @@ -594,8 +612,8 @@ def upload_file( """ Upload a file to a Dataverse file column. - :param tablename: Table name, e.g. ``"account"`` or ``"new_customtable"``. - :type tablename: str + :param table_schema_name: Table schema name, e.g. ``"account"`` or ``"new_customtable"``. + :type table_schema_name: str :param record_id: GUID of the target record. 
:type record_id: str :param file_name_attribute: Logical name of the file column attribute. @@ -626,7 +644,7 @@ def upload_file( Upload a PDF file:: client.upload_file( - tablename="account", + table_schema_name="account", record_id=account_id, file_name_attribute="new_contract", path="/path/to/contract.pdf", @@ -636,7 +654,7 @@ def upload_file( Upload with auto mode selection:: client.upload_file( - tablename="email", + table_schema_name="email", record_id=email_id, file_name_attribute="new_attachment", path="/path/to/large_file.zip", @@ -644,7 +662,7 @@ def upload_file( ) """ od = self._get_odata() - entity_set = od._entity_set_from_logical(tablename) + entity_set = od._entity_set_from_logical(table_schema_name) od.upload_file( entity_set, record_id, diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index 94663be..7137286 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -33,9 +33,9 @@ def _escape_odata_quotes(value: str) -> str: return value.replace("'", "''") @staticmethod - def _normalize_cache_key(tablename: str) -> str: - """Normalize tablename to lowercase for case-insensitive cache keys.""" - return tablename.lower() if isinstance(tablename, str) else "" + def _normalize_cache_key(table_schema_name: str) -> str: + """Normalize table_schema_name to lowercase for case-insensitive cache keys.""" + return table_schema_name.lower() if isinstance(table_schema_name, str) else "" @staticmethod def _lowercase_keys(record: Dict[str, Any]) -> Dict[str, Any]: @@ -75,11 +75,11 @@ def __init__( backoff=self.config.http_backoff, timeout=self.config.http_timeout, ) - # Cache: normalized tablename (lowercase) -> entity set name (plural) resolved from metadata + # Cache: normalized table_schema_name (lowercase) -> entity set name (plural) resolved from metadata self._logical_to_entityset_cache: dict[str, str] = {} - # Cache: normalized tablename (lowercase) -> primary id 
attribute (e.g. accountid) + # Cache: normalized table_schema_name (lowercase) -> primary id attribute (e.g. accountid) self._logical_primaryid_cache: dict[str, str] = {} - # Picklist label cache: (normalized_tablename, normalized_attribute) -> {'map': {...}, 'ts': epoch_seconds} + # Picklist label cache: (normalized_table_schema_name, normalized_attribute) -> {'map': {...}, 'ts': epoch_seconds} self._picklist_label_cache = {} self._picklist_cache_ttl_seconds = 3600 # 1 hour TTL @@ -159,15 +159,15 @@ def _request(self, method: str, url: str, *, expected: tuple[int, ...] = (200, 2 ) # --- CRUD Internal functions --- - def _create(self, entity_set: str, tablename: str, record: Dict[str, Any]) -> str: + def _create(self, entity_set: str, table_schema_name: str, record: Dict[str, Any]) -> str: """Create a single record and return its GUID. Parameters ------- entity_set : str Resolved entity set (plural) name. - tablename : str - Table name. + table_schema_name : str + Table schema name. record : dict[str, Any] Attribute payload mapped by logical column names. @@ -183,7 +183,7 @@ def _create(self, entity_set: str, tablename: str, record: Dict[str, Any]) -> st """ # Lowercase all keys to match Dataverse LogicalName expectations record = self._lowercase_keys(record) - record = self._convert_labels_to_ints(tablename, record) + record = self._convert_labels_to_ints(table_schema_name, record) url = f"{self.api}/{entity_set}" r = self._request("post", url, json=record) @@ -202,21 +202,21 @@ def _create(self, entity_set: str, tablename: str, record: Dict[str, Any]) -> st f"Create response missing GUID in OData-EntityId/Location headers (status={getattr(r,'status_code', '?')}). 
Headers: {header_keys}" ) - def _create_multiple(self, entity_set: str, tablename: str, records: List[Dict[str, Any]]) -> List[str]: + def _create_multiple(self, entity_set: str, table_schema_name: str, records: List[Dict[str, Any]]) -> List[str]: """Create multiple records using the collection-bound CreateMultiple action. Parameters ---------- entity_set : str Resolved entity set (plural) name. - tablename : str - Table name. + table_schema_name : str + Table schema name. records : list[dict[str, Any]] Payloads mapped by logical attribute names. Multi-create logical name resolution ------------------------------------ - - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.<tablename>``. + - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.<table_schema_name>``. - If all payloads already include ``@odata.type`` no modification occurs. Returns @@ -228,12 +228,12 @@ raise TypeError("All items for multi-create must be dicts") need_logical = any("@odata.type" not in r for r in records) # @odata.type uses LogicalName (lowercase) - logical_name = tablename.lower() + logical_name = table_schema_name.lower() enriched: List[Dict[str, Any]] = [] for r in records: # Lowercase all keys to match Dataverse LogicalName expectations r = self._lowercase_keys(r) - r = self._convert_labels_to_ints(tablename, r) + r = self._convert_labels_to_ints(table_schema_name, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -271,28 +271,28 @@ return [] # --- Derived helpers for high-level client ergonomics --- - def _primary_id_attr(self, tablename: str) -> str: + def _primary_id_attr(self, table_schema_name: str) -> str: """Return primary key attribute using metadata; error if unavailable.""" - cache_key = self._normalize_cache_key(tablename) + cache_key = 
self._normalize_cache_key(table_schema_name) pid = self._logical_primaryid_cache.get(cache_key) if pid: return pid - # Resolve metadata (populates _logical_primaryid_cache or raises if tablename unknown) - self._entity_set_from_logical(tablename) + # Resolve metadata (populates _logical_primaryid_cache or raises if table_schema_name unknown) + self._entity_set_from_logical(table_schema_name) pid2 = self._logical_primaryid_cache.get(cache_key) if pid2: return pid2 raise RuntimeError( - f"PrimaryIdAttribute not resolved for tablename '{tablename}'. Metadata did not include PrimaryIdAttribute." + f"PrimaryIdAttribute not resolved for table_schema_name '{table_schema_name}'. Metadata did not include PrimaryIdAttribute." ) - def _update_by_ids(self, tablename: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: + def _update_by_ids(self, table_schema_name: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """Update many records by GUID list using UpdateMultiple under the hood. Parameters ---------- - tablename : str - Table name. + table_schema_name : str + Table schema name. ids : list[str] GUIDs of target records. 
changes : dict | list[dict] @@ -302,11 +302,11 @@ def _update_by_ids(self, tablename: str, ids: List[str], changes: Union[Dict[str raise TypeError("ids must be list[str]") if not ids: return None - pk_attr = self._primary_id_attr(tablename) - entity_set = self._entity_set_from_logical(tablename) + pk_attr = self._primary_id_attr(table_schema_name) + entity_set = self._entity_set_from_logical(table_schema_name) if isinstance(changes, dict): batch = [{pk_attr: rid, **changes} for rid in ids] - self._update_multiple(entity_set, tablename, batch) + self._update_multiple(entity_set, table_schema_name, batch) return None if not isinstance(changes, list): raise TypeError("changes must be dict or list[dict]") @@ -317,12 +317,12 @@ def _update_by_ids(self, tablename: str, ids: List[str], changes: Union[Dict[str if not isinstance(patch, dict): raise TypeError("Each patch must be a dict") batch.append({pk_attr: rid, **patch}) - self._update_multiple(entity_set, tablename, batch) + self._update_multiple(entity_set, table_schema_name, batch) return None def _delete_multiple( self, - tablename: str, + table_schema_name: str, ids: List[str], ) -> Optional[str]: """Delete many records by GUID list. 
@@ -334,12 +334,12 @@ def _delete_multiple( return None value_objects = [{"Value": rid, "Type": "System.Guid"} for rid in targets] - pk_attr = self._primary_id_attr(tablename) + pk_attr = self._primary_id_attr(table_schema_name) timestamp = datetime.now(timezone.utc).isoformat(timespec="seconds").replace("+00:00", "Z") - job_label = f"Bulk delete {tablename} records @ {timestamp}" + job_label = f"Bulk delete {table_schema_name} records @ {timestamp}" # EntityName must use lowercase LogicalName - logical_name = tablename.lower() + logical_name = table_schema_name.lower() query = { "@odata.type": "Microsoft.Dynamics.CRM.QueryExpression", @@ -401,13 +401,13 @@ def esc(match): return f"({k})" return f"({k})" - def _update(self, tablename: str, key: str, data: Dict[str, Any]) -> None: + def _update(self, table_schema_name: str, key: str, data: Dict[str, Any]) -> None: """Update an existing record. Parameters ---------- - tablename : str - Table name. + table_schema_name : str + Table schema name. key : str Record GUID (with or without parentheses) or alternate key. data : dict @@ -419,24 +419,24 @@ def _update(self, tablename: str, key: str, data: Dict[str, Any]) -> None: """ # Lowercase all keys to match Dataverse LogicalName expectations data = self._lowercase_keys(data) - data = self._convert_labels_to_ints(tablename, data) - entity_set = self._entity_set_from_logical(tablename) + data = self._convert_labels_to_ints(table_schema_name, data) + entity_set = self._entity_set_from_logical(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("patch", url, headers={"If-Match": "*"}, json=data) - def _update_multiple(self, entity_set: str, tablename: str, records: List[Dict[str, Any]]) -> None: + def _update_multiple(self, entity_set: str, table_schema_name: str, records: List[Dict[str, Any]]) -> None: """Bulk update existing records via the collection-bound UpdateMultiple action. 
Parameters ---------- entity_set : str Resolved entity set name. - tablename : str - Table name, e.g. "account". + table_schema_name : str + Table schema name, e.g. "account". records : list[dict] Each dict must include the real primary key attribute for the entity (e.g. ``accountid``) and one or more fields to update. If ``@odata.type`` is omitted in any payload, the table name is resolved once and - stamped into those payloads as ``Microsoft.Dynamics.CRM.<tablename>`` (same behaviour as bulk create). + stamped into those payloads as ``Microsoft.Dynamics.CRM.<table_schema_name>`` (same behaviour as bulk create). Behaviour --------- @@ -460,12 +460,12 @@ # Determine whether we need logical name resolution (@odata.type missing in any payload) need_logical = any("@odata.type" not in r for r in records) # @odata.type uses LogicalName (lowercase) - logical_name = tablename.lower() + logical_name = table_schema_name.lower() enriched: List[Dict[str, Any]] = [] for r in records: # Lowercase all keys to match Dataverse LogicalName expectations r = self._lowercase_keys(r) - r = self._convert_labels_to_ints(tablename, r) + r = self._convert_labels_to_ints(table_schema_name, r) if "@odata.type" in r or not need_logical: enriched.append(r) else: @@ -479,19 +479,19 @@ # Intentionally ignore response content: no stable contract for IDs across environments. 
return None - def _delete(self, tablename: str, key: str) -> None: + def _delete(self, table_schema_name: str, key: str) -> None: """Delete a record by GUID or alternate key.""" - entity_set = self._entity_set_from_logical(tablename) + entity_set = self._entity_set_from_logical(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" self._request("delete", url, headers={"If-Match": "*"}) - def _get(self, tablename: str, key: str, select: Optional[str] = None) -> Dict[str, Any]: + def _get(self, table_schema_name: str, key: str, select: Optional[str] = None) -> Dict[str, Any]: """Retrieve a single record. Parameters ---------- - tablename : str - Table name. + table_schema_name : str + Table schema name. key : str Record GUID (with or without parentheses) or alternate key syntax. select : str | None @@ -501,14 +501,14 @@ def _get(self, tablename: str, key: str, select: Optional[str] = None) -> Dict[s if select: # Lowercase column names for case-insensitive matching params["$select"] = select.lower() - entity_set = self._entity_set_from_logical(tablename) + entity_set = self._entity_set_from_logical(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("get", url, params=params) return r.json() def _get_multiple( self, - tablename: str, + table_schema_name: str, select: Optional[List[str]] = None, filter: Optional[str] = None, orderby: Optional[List[str]] = None, @@ -520,18 +520,20 @@ def _get_multiple( Parameters ---------- - tablename : str - Table name. + table_schema_name : str + Table schema name. select : list[str] | None - Columns to select; joined with commas into $select. + Columns to select; joined with commas into $select. Column names are automatically lowercased. filter : str | None - OData $filter expression as a string. + OData $filter expression as a string. IMPORTANT: This is passed as-is without transformation. + Users must provide lowercase logical column names (e.g., "statecode eq 0"). 
orderby : list[str] | None - Order expressions; joined with commas into $orderby. + Order expressions; joined with commas into $orderby. Column names are automatically lowercased. top : int | None Max number of records across all pages. Passed as $top on the first request; the server will paginate via nextLink as needed. expand : list[str] | None - Navigation properties to expand; joined with commas into $expand. + Navigation properties to expand; joined with commas into $expand. IMPORTANT: These are case-sensitive + and passed as-is. Users must provide exact navigation property names from entity metadata. page_size : int | None Hint for per-page size using Prefer: ``odata.maxpagesize``. @@ -555,7 +557,7 @@ def _do_request(url: str, *, params: Optional[Dict[str, Any]] = None) -> Dict[st except ValueError: return {} - entity_set = self._entity_set_from_logical(tablename) + entity_set = self._entity_set_from_logical(table_schema_name) base_url = f"{self.api}/{entity_set}" params: Dict[str, Any] = {} if select: @@ -753,7 +755,7 @@ def _get_entity_by_logical_name( def _create_entity( self, - schema_name: str, + table_schema_name: str, display_name: str, attributes: List[Dict[str, Any]], solution_unique_name: Optional[str] = None, @@ -761,7 +763,7 @@ def _create_entity( url = f"{self.api}/EntityDefinitions" payload = { "@odata.type": "Microsoft.Dynamics.CRM.EntityMetadata", - "SchemaName": schema_name, + "SchemaName": table_schema_name, "DisplayName": self._label(display_name), "DisplayCollectionName": self._label(display_name + "s"), "Description": self._label(f"Custom entity for {display_name}"), @@ -776,26 +778,26 @@ def _create_entity( params = {"SolutionUniqueName": solution_unique_name} self._request("post", url, json=payload, params=params) ent = self._get_entity_by_logical_name( - schema_name, + table_schema_name, headers={"Consistency": "Strong"}, ) if not ent or not ent.get("EntitySetName"): raise RuntimeError( - f"Failed to create or retrieve entity 
'{schema_name}' (EntitySetName not available)." + f"Failed to create or retrieve entity '{table_schema_name}' (EntitySetName not available)." ) if not ent.get("MetadataId"): raise RuntimeError( - f"MetadataId missing after creating entity '{schema_name}'." + f"MetadataId missing after creating entity '{table_schema_name}'." ) return ent def _get_attribute_metadata( self, entity_metadata_id: str, - schema_name: str, + column_schema_name: str, extra_select: Optional[str] = None, ) -> Optional[Dict[str, Any]]: - attr_escaped = self._escape_odata_quotes(schema_name) + attr_escaped = self._escape_odata_quotes(column_schema_name) url = f"{self.api}/EntityDefinitions({entity_metadata_id})/Attributes" select_fields = ["MetadataId", "LogicalName", "SchemaName"] if extra_select: @@ -847,7 +849,7 @@ def _build_localizedlabels_payload(self, translations: Dict[int, str]) -> Dict[s "LocalizedLabels": locs, } - def _enum_optionset_payload(self, schema_name: str, enum_cls: type[Enum], is_primary_name: bool = False) -> Dict[str, Any]: + def _enum_optionset_payload(self, column_schema_name: str, enum_cls: type[Enum], is_primary_name: bool = False) -> Dict[str, Any]: """Create local (IsGlobal=False) PicklistAttributeMetadata from an Enum subclass. 
Supports translation mapping via optional class attribute `__labels__`: @@ -922,10 +924,10 @@ def _enum_optionset_payload(self, schema_name: str, enum_cls: type[Enum], is_pri "Label": self._build_localizedlabels_payload(per_lang), }) - attr_label = schema_name.split("_")[-1] + attr_label = column_schema_name.split("_")[-1] return { "@odata.type": "Microsoft.Dynamics.CRM.PicklistAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(attr_label), "RequiredLevel": {"Value": "None"}, "IsPrimaryName": bool(is_primary_name), @@ -947,7 +949,7 @@ def _normalize_picklist_label(self, label: str) -> str: norm = re.sub(r"\s+", " ", norm).strip().lower() return norm - def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str, int]]: + def _optionset_map(self, table_schema_name: str, attr_logical: str) -> Optional[Dict[str, int]]: """Build or return cached mapping of normalized label -> value for a picklist attribute. Returns empty dict if attribute is not a picklist or has no options. Returns None only @@ -957,10 +959,10 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str ----- - This method calls the Web API twice per attribute so it could have perf impact when there are lots of columns on the entity. 
""" - if not tablename or not attr_logical: + if not table_schema_name or not attr_logical: return None # Normalize cache key for case-insensitive lookups - cache_key = (self._normalize_cache_key(tablename), self._normalize_cache_key(attr_logical)) + cache_key = (self._normalize_cache_key(table_schema_name), self._normalize_cache_key(attr_logical)) now = time.time() entry = self._picklist_label_cache.get(cache_key) if isinstance(entry, dict) and 'map' in entry and (now - entry.get('ts', 0)) < self._picklist_cache_ttl_seconds: @@ -968,11 +970,11 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str # LogicalNames in Dataverse are stored in lowercase, so we need to lowercase for filters attr_esc = self._escape_odata_quotes(attr_logical.lower()) - tablename_esc = self._escape_odata_quotes(tablename.lower()) + table_schema_name_esc = self._escape_odata_quotes(table_schema_name.lower()) # Step 1: lightweight fetch (no expand) to determine attribute type url_type = ( - f"{self.api}/EntityDefinitions(LogicalName='{tablename_esc}')/Attributes" + f"{self.api}/EntityDefinitions(LogicalName='{table_schema_name_esc}')/Attributes" f"?$filter=LogicalName eq '{attr_esc}'&$select=LogicalName,AttributeType" ) # Retry up to 3 times on 404 (new or not-yet-published attribute metadata). If still 404, raise. 
@@ -988,7 +990,7 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str time.sleep(0.4 * (2 ** attempt)) continue raise RuntimeError( - f"Picklist attribute metadata not found after retries: entity='{tablename}' attribute='{attr_logical}' (404)" + f"Picklist attribute metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)" ) from err raise if r_type is None: @@ -1006,7 +1008,7 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str # Step 2: fetch with expand only now that we know it's a picklist # Need to cast to the derived PicklistAttributeMetadata type; OptionSet is not a nav on base AttributeMetadata. cast_url = ( - f"{self.api}/EntityDefinitions(LogicalName='{tablename_esc}')/Attributes(LogicalName='{attr_esc}')/" + f"{self.api}/EntityDefinitions(LogicalName='{table_schema_name_esc}')/Attributes(LogicalName='{attr_esc}')/" "Microsoft.Dynamics.CRM.PicklistAttributeMetadata?$select=LogicalName&$expand=OptionSet($select=Options)" ) # Step 2 fetch with retries: expanded OptionSet (cast form first) @@ -1021,7 +1023,7 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str time.sleep(0.4 * (2 ** attempt)) # 0.4s, 0.8s continue raise RuntimeError( - f"Picklist OptionSet metadata not found after retries: entity='{tablename}' attribute='{attr_logical}' (404)" + f"Picklist OptionSet metadata not found after retries: entity='{table_schema_name}' attribute='{attr_logical}' (404)" ) from err raise if r_opts is None: @@ -1059,7 +1061,7 @@ def _optionset_map(self, tablename: str, attr_logical: str) -> Optional[Dict[str self._picklist_label_cache[cache_key] = {'map': {}, 'ts': now} return {} - def _convert_labels_to_ints(self, tablename: str, record: Dict[str, Any]) -> Dict[str, Any]: + def _convert_labels_to_ints(self, table_schema_name: str, record: Dict[str, Any]) -> Dict[str, Any]: """Return a copy of record with any labels converted to option 
ints. Heuristic: For each string value, attempt to resolve against picklist metadata. @@ -1069,7 +1071,7 @@ def _convert_labels_to_ints(self, tablename: str, record: Dict[str, Any]) -> Dic for k, v in list(out.items()): if not isinstance(v, str) or not v.strip(): continue - mapping = self._optionset_map(tablename, k) + mapping = self._optionset_map(table_schema_name, k) if not mapping: continue norm = self._normalize_picklist_label(v) @@ -1078,18 +1080,18 @@ def _convert_labels_to_ints(self, tablename: str, record: Dict[str, Any]) -> Dic out[k] = val return out - def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: bool = False) -> Optional[Dict[str, Any]]: + def _attribute_payload(self, column_schema_name: str, dtype: Any, *, is_primary_name: bool = False) -> Optional[Dict[str, Any]]: # Enum-based local option set support if isinstance(dtype, type) and issubclass(dtype, Enum): - return self._enum_optionset_payload(schema_name, dtype, is_primary_name=is_primary_name) + return self._enum_optionset_payload(column_schema_name, dtype, is_primary_name=is_primary_name) if not isinstance(dtype, str): - raise ValueError(f"Unsupported column spec type for '{schema_name}': {type(dtype)} (expected str or Enum subclass)") + raise ValueError(f"Unsupported column spec type for '{column_schema_name}': {type(dtype)} (expected str or Enum subclass)") dtype_l = dtype.lower().strip() - label = schema_name.split("_")[-1] + label = column_schema_name.split("_")[-1] if dtype_l in ("string", "text"): return { "@odata.type": "Microsoft.Dynamics.CRM.StringAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MaxLength": 200, @@ -1099,7 +1101,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("int", "integer"): return { "@odata.type": "Microsoft.Dynamics.CRM.IntegerAttributeMetadata", - "SchemaName": schema_name, + 
"SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "Format": "None", @@ -1109,7 +1111,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("decimal", "money"): return { "@odata.type": "Microsoft.Dynamics.CRM.DecimalAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MinValue": -100000000000.0, @@ -1119,7 +1121,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("float", "double"): return { "@odata.type": "Microsoft.Dynamics.CRM.DoubleAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "MinValue": -100000000000.0, @@ -1129,7 +1131,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("datetime", "date"): return { "@odata.type": "Microsoft.Dynamics.CRM.DateTimeAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "Format": "DateOnly", @@ -1138,7 +1140,7 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b if dtype_l in ("bool", "boolean"): return { "@odata.type": "Microsoft.Dynamics.CRM.BooleanAttributeMetadata", - "SchemaName": schema_name, + "SchemaName": column_schema_name, "DisplayName": self._label(label), "RequiredLevel": {"Value": "None"}, "OptionSet": { @@ -1156,24 +1158,24 @@ def _attribute_payload(self, schema_name: str, dtype: Any, *, is_primary_name: b } return None - def _get_table_info(self, tablename: str) -> Optional[Dict[str, Any]]: + def _get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """Return basic metadata for a custom table if it exists. 
Parameters ---------- - tablename : str - Friendly name or full schema name (with publisher prefix and underscore). + table_schema_name : str + Friendly name or full schema name (with customization prefix value and underscore). Returns ------- dict | None Metadata summary or ``None`` if not found. """ - ent = self._get_entity_by_logical_name(tablename) + ent = self._get_entity_by_logical_name(table_schema_name) if not ent: return None return { - "entity_schema": ent.get("SchemaName") or tablename, + "entity_schema": ent.get("SchemaName") or table_schema_name, "entity_logical_name": ent.get("LogicalName"), "entity_set_name": ent.get("EntitySetName"), "metadata_id": ent.get("MetadataId"), @@ -1189,12 +1191,12 @@ def _list_tables(self) -> List[Dict[str, Any]]: r = self._request("get", url, params=params) return r.json().get("value", []) - def _delete_table(self, tablename: str) -> None: + def _delete_table(self, table_schema_name: str) -> None: """Delete a table by SchemaName. Case-insensitive.""" - ent = self._get_entity_by_logical_name(tablename) + ent = self._get_entity_by_logical_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{tablename}' not found.", + f"Table '{table_schema_name}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) metadata_id = ent["MetadataId"] @@ -1203,31 +1205,31 @@ def _delete_table(self, tablename: str) -> None: def _create_table( self, - tablename: str, + table_schema_name: str, schema: Dict[str, Any], solution_unique_name: Optional[str] = None, - primary_column_name: Optional[str] = None, + primary_column_schema_name: Optional[str] = None, ) -> Dict[str, Any]: - """Create a table using tablename as SchemaName directly. + """Create a table using table_schema_name as SchemaName directly. The server will determine the LogicalName automatically (usually lowercased SchemaName). 
""" # Check if table already exists (case-insensitive) - ent = self._get_entity_by_logical_name(tablename) + ent = self._get_entity_by_logical_name(table_schema_name) if ent: raise MetadataError( - f"Table '{tablename}' already exists.", + f"Table '{table_schema_name}' already exists.", subcode=ec.METADATA_TABLE_ALREADY_EXISTS, ) created_cols: List[str] = [] - # Use provided primary column name, or derive from tablename prefix (e.g., "new_Product" -> "new_Name"). + # Use provided primary column name, or derive from table_schema_name prefix (e.g., "new_Product" -> "new_Name"). # If no prefix detected, default to "new_Name"; server will validate overall table schema. - if primary_column_name: - primary_attr_schema = primary_column_name + if primary_column_schema_name: + primary_attr_schema = primary_column_schema_name else: - primary_attr_schema = f"{tablename.split('_',1)[0]}_Name" if "_" in tablename else "new_Name" + primary_attr_schema = f"{table_schema_name.split('_',1)[0]}_Name" if "_" in table_schema_name else "new_Name" attributes: List[Dict[str, Any]] = [] attributes.append(self._attribute_payload(primary_attr_schema, "string", is_primary_name=True)) @@ -1245,14 +1247,14 @@ def _create_table( raise ValueError("solution_unique_name cannot be empty") metadata = self._create_entity( - schema_name=tablename, - display_name=tablename, + table_schema_name=table_schema_name, + display_name=table_schema_name, attributes=attributes, solution_unique_name=solution_unique_name, ) return { - "entity_schema": tablename, + "entity_schema": table_schema_name, "entity_logical_name": metadata.get("LogicalName"), "entity_set_name": metadata.get("EntitySetName"), "metadata_id": metadata.get("MetadataId"), @@ -1261,22 +1263,22 @@ def _create_table( def _create_columns( self, - tablename: str, + table_schema_name: str, columns: Dict[str, Any], ) -> List[str]: """Create columns on an existing table. 
Case-insensitive table lookup.""" if not isinstance(columns, dict) or not columns: raise TypeError("columns must be a non-empty dict[name -> type]") - ent = self._get_entity_by_logical_name(tablename) + ent = self._get_entity_by_logical_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{tablename}' not found.", + f"Table '{table_schema_name}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) # Use the actual SchemaName from the entity metadata - entity_schema = ent.get("SchemaName") or tablename + entity_schema = ent.get("SchemaName") or table_schema_name metadata_id = ent.get("MetadataId") created: List[str] = [] needs_picklist_flush = False @@ -1301,7 +1303,7 @@ def _create_columns( def _delete_columns( self, - tablename: str, + table_schema_name: str, columns: Union[str, List[str]], ) -> List[str]: """Delete columns from an existing table. Case-insensitive table lookup.""" @@ -1316,15 +1318,15 @@ def _delete_columns( if not isinstance(name, str) or not name.strip(): raise ValueError("column names must be non-empty strings") - ent = self._get_entity_by_logical_name(tablename) + ent = self._get_entity_by_logical_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( - f"Table '{tablename}' not found.", + f"Table '{table_schema_name}' not found.", subcode=ec.METADATA_TABLE_NOT_FOUND, ) # Use the actual SchemaName from the entity metadata - entity_schema = ent.get("SchemaName") or tablename + entity_schema = ent.get("SchemaName") or table_schema_name metadata_id = ent.get("MetadataId") deleted: List[str] = [] needs_picklist_flush = False From 18418c0f8a60771c28143baa5b7b85fde0a62883 Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 14:49:21 -0800 Subject: [PATCH 4/9] schemaName rename cleanup --- examples/advanced/file_upload.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/examples/advanced/file_upload.py 
b/examples/advanced/file_upload.py index e6aa454..6b3c871 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -187,8 +187,8 @@ def ensure_table(): sys.exit(1) entity_set = table_info.get("entity_set_name") -logical = table_info.get("entity_logical_name") or entity_set.rstrip("s") -attr_prefix = logical.split('_',1)[0] if '_' in logical else logical +table_schema_name = table_info.get("entity_schema_name") or entity_set.rstrip("s") +attr_prefix = table_schema_name.split('_',1)[0] if '_' in table_schema_name else table_schema_name name_attr = f"{attr_prefix}_name" small_file_attr_schema = f"{attr_prefix}_SmallDocument" # second file attribute for small single-request demo small_file_attr_logical = f"{attr_prefix}_smalldocument" # expected logical name (lowercase) @@ -258,13 +258,13 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str) record_id = None try: payload = {name_attr: "File Sample Record"} - log(f"client.create('{logical}', payload)") - created_ids = backoff(lambda: client.create(logical, payload)) + log(f"client.create('{table_schema_name}', payload)") + created_ids = backoff(lambda: client.create(table_schema_name, payload)) if isinstance(created_ids, list) and created_ids: record_id = created_ids[0] else: raise RuntimeError("Unexpected create return; expected list[str] with at least one GUID") - print({"record_created": True, "id": record_id, "logical": logical}) + print({"record_created": True, "id": record_id, "logical": table_schema_name}) except Exception as e: # noqa: BLE001 print({"record_created": False, "error": str(e)}) sys.exit(1) @@ -295,7 +295,7 @@ def get_dataset_info(file_path: Path): try: DATASET_FILE, small_file_size, src_hash = get_dataset_info(_GENERATED_TEST_FILE) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, small_file_attr_logical, str(DATASET_FILE), @@ -322,7 +322,7 @@ def get_dataset_info(file_path: Path): print("Small single-request 
upload demo - REPLACE with 8MB file:") replacement_file, replace_size_small, replace_hash_small = get_dataset_info(_GENERATED_TEST_FILE_8MB) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, small_file_attr_logical, str(replacement_file), @@ -350,7 +350,7 @@ def get_dataset_info(file_path: Path): try: DATASET_FILE, src_size_chunk, src_hash_chunk = get_dataset_info(_GENERATED_TEST_FILE) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, chunk_file_attr_logical, str(DATASET_FILE), @@ -376,7 +376,7 @@ def get_dataset_info(file_path: Path): print("Streaming chunk upload demo - REPLACE with 8MB file:") replacement_file, replace_size_chunk, replace_hash_chunk = get_dataset_info(_GENERATED_TEST_FILE_8MB) backoff(lambda: client.upload_file( - logical, + table_schema_name, record_id, chunk_file_attr_logical, str(replacement_file), @@ -401,8 +401,8 @@ def get_dataset_info(file_path: Path): # --------------------------- Cleanup --------------------------- if cleanup_record and record_id: try: - log(f"client.delete('{logical}', '{record_id}')") - backoff(lambda: client.delete(logical, record_id)) + log(f"client.delete('{table_schema_name}', '{record_id}')") + backoff(lambda: client.delete(table_schema_name, record_id)) print({"record_deleted": True}) except Exception as e: # noqa: BLE001 print({"record_deleted": False, "error": str(e)}) From 9a6dc4e5ff790d847ec029c3e51ef6e3cdd0ffd2 Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 15:19:08 -0800 Subject: [PATCH 5/9] Copilot comments --- examples/advanced/file_upload.py | 2 +- examples/advanced/pandas_integration.py | 10 +++++----- examples/advanced/walkthrough.py | 6 +++--- examples/basic/functional_testing.py | 14 +++++++------- examples/basic/installation_example.py | 2 +- src/PowerPlatform/Dataverse/client.py | 12 ++++++------ src/PowerPlatform/Dataverse/data/odata.py | 12 +++++------- 7 files changed, 28 insertions(+), 30 deletions(-) diff --git 
a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 6b3c871..527d421 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -187,7 +187,7 @@ def ensure_table(): sys.exit(1) entity_set = table_info.get("entity_set_name") -table_schema_name = table_info.get("entity_schema_name") or entity_set.rstrip("s") +table_schema_name = table_info.get("table_schema_name") or entity_set.rstrip("s") attr_prefix = table_schema_name.split('_',1)[0] if '_' in table_schema_name else table_schema_name name_attr = f"{attr_prefix}_name" small_file_attr_schema = f"{attr_prefix}_SmallDocument" # second file attribute for small single-request demo diff --git a/examples/advanced/pandas_integration.py b/examples/advanced/pandas_integration.py index 8bcb004..fc62e06 100644 --- a/examples/advanced/pandas_integration.py +++ b/examples/advanced/pandas_integration.py @@ -74,10 +74,10 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 table_info = existing created_this_run = False print({ - "table": table_info.get("entity_schema"), + "table": table_info.get("table_schema_name"), "existed": True, "entity_set": table_info.get("entity_set_name"), - "logical": table_info.get("entity_logical_name"), + "logical": table_info.get("table_logical_name"), "metadata_id": table_info.get("metadata_id"), }) @@ -96,10 +96,10 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 ) created_this_run = True if table_info and table_info.get("columns_created") else False print({ - "table": table_info.get("entity_schema") if table_info else None, + "table": table_info.get("table_schema_name") if table_info else None, "existed": False, "entity_set": table_info.get("entity_set_name") if table_info else None, - "logical": table_info.get("entity_logical_name") if table_info else None, + "logical": table_info.get("table_logical_name") if table_info else None, "metadata_id": table_info.get("metadata_id") if 
table_info else None, }) except Exception as e: @@ -119,7 +119,7 @@ def backoff_retry(op, *, delays=(0, 2, 5, 10, 20), retry_http_statuses=(400, 403 # Fail fast: all operations must use the custom table sys.exit(1) -logical = table_info.get("entity_logical_name") +logical = table_info.get("table_logical_name") # Derive attribute logical name prefix from the entity logical name attr_prefix = logical.split("_", 1)[0] if "_" in logical else logical record_data = { diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py index 25b7433..315dbfc 100644 --- a/examples/advanced/walkthrough.py +++ b/examples/advanced/walkthrough.py @@ -75,8 +75,8 @@ def main(): table_info = client.get_table_info(table_name) if table_info: - print(f"✓ Table already exists: {table_info.get('entity_schema')}") - print(f" Logical Name: {table_info.get('entity_logical_name')}") + print(f"✓ Table already exists: {table_info.get('table_schema_name')}") + print(f" Logical Name: {table_info.get('table_logical_name')}") print(f" Entity Set: {table_info.get('entity_set_name')}") else: log_call(f"client.create_table('{table_name}', schema={{...}})") @@ -88,7 +88,7 @@ def main(): "new_Priority": Priority } table_info = client.create_table(table_name, schema) - print(f"✓ Created table: {table_info.get('entity_schema')}") + print(f"✓ Created table: {table_info.get('table_schema_name')}") print(f" Columns created: {', '.join(table_info.get('columns_created', []))}") # ============================================================================ diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index f16d785..00c02ba 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -106,8 +106,8 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]: } ) - print(f"✅ Created test table: {table_info.get('entity_schema')}") - print(f" Logical name: {table_info.get('entity_logical_name')}") + print(f"✅ 
Created test table: {table_info.get('table_schema_name')}") + print(f" Logical name: {table_info.get('table_logical_name')}") print(f" Entity set: {table_info.get('entity_set_name')}") # Wait a moment for table to be ready @@ -124,7 +124,7 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s print("\n📝 Record Creation Test") print("=" * 50) - logical_name = table_info.get("entity_logical_name") + logical_name = table_info.get("table_logical_name") attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name # Create test record data @@ -163,7 +163,7 @@ def test_read_record(client: DataverseClient, table_info: Dict[str, Any], record print("\n📖 Record Reading Test") print("=" * 50) - logical_name = table_info.get("entity_logical_name") + logical_name = table_info.get("table_logical_name") attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name try: @@ -198,7 +198,7 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N print("\n🔍 Record Query Test") print("=" * 50) - logical_name = table_info.get("entity_logical_name") + logical_name = table_info.get("table_logical_name") attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name try: @@ -234,7 +234,7 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor print("\n🧹 Cleanup") print("=" * 50) - logical_name = table_info.get("entity_logical_name") + logical_name = table_info.get("table_logical_name") # Ask user if they want to clean up cleanup_choice = input("Do you want to delete the test record? 
(y/N): ").strip().lower() @@ -253,7 +253,7 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor if table_cleanup in ['y', 'yes']: try: - client.delete_table(table_info.get("entity_schema")) + client.delete_table(table_info.get("table_schema_name")) print("✅ Test table deleted successfully") except Exception as e: print(f"⚠️ Failed to delete test table: {e}") diff --git a/examples/basic/installation_example.py b/examples/basic/installation_example.py index ef47e79..1974a64 100644 --- a/examples/basic/installation_example.py +++ b/examples/basic/installation_example.py @@ -226,7 +226,7 @@ def show_usage_examples(): # Get table information info = client.get_table_info("CustomEntity") -print(f"Table: {info['entity_schema']}") +print(f"Table: {info['table_schema_name']}") # List all tables tables = client.list_tables() diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index 4758f4b..87c41ef 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -402,8 +402,8 @@ def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: :param table_schema_name: Table schema name (e.g. ``"new_SampleItem"`` or ``"account"``). :type table_schema_name: str - :return: Dictionary containing table metadata with keys ``entity_schema``, - ``entity_logical_name``, ``entity_set_name``, and ``metadata_id``. + :return: Dictionary containing table metadata with keys ``table_schema_name``, + ``table_logical_name``, ``entity_set_name``, and ``metadata_id``. Returns None if the table is not found. 
:rtype: dict or None @@ -412,7 +412,7 @@ def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: info = client.get_table_info("new_SampleItem") if info: - print(f"Logical name: {info['entity_logical_name']}") + print(f"Logical name: {info['table_logical_name']}") print(f"Entity set: {info['entity_set_name']}") """ return self._get_odata()._get_table_info(table_schema_name) @@ -454,8 +454,8 @@ class ItemStatus(IntEnum): from the table's customization prefix value. :type primary_column_schema_name: str or None - :return: Dictionary containing table metadata including ``entity_schema``, - ``entity_set_name``, ``entity_logical_name``, ``metadata_id``, and ``columns_created``. + :return: Dictionary containing table metadata including ``table_schema_name``, + ``entity_set_name``, ``table_logical_name``, ``metadata_id``, and ``columns_created``. :rtype: dict :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If table creation fails or the schema is invalid. @@ -478,7 +478,7 @@ class ItemStatus(IntEnum): } result = client.create_table("new_SampleItem", schema) - print(f"Created table: {result['entity_logical_name']}") + print(f"Created table: {result['table_logical_name']}") print(f"Columns: {result['columns_created']}") Create a table with a custom primary column name:: diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index 7137286..c4c2b2f 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -571,7 +571,7 @@ def _do_request(url: str, *, params: Optional[Dict[str, Any]] = None) -> Dict[st params["$orderby"] = ",".join(self._lowercase_list(orderby)) if expand: # Lowercase navigation property names for case-insensitive matching - params["$expand"] = ",".join(self._lowercase_list(expand)) + params["$expand"] = ",".join(expand) if top is not None: params["$top"] = int(top) @@ -1175,8 +1175,8 @@ def _get_table_info(self, table_schema_name: str) 
-> Optional[Dict[str, Any]]: if not ent: return None return { - "entity_schema": ent.get("SchemaName") or table_schema_name, - "entity_logical_name": ent.get("LogicalName"), + "table_schema_name": ent.get("SchemaName") or table_schema_name, + "table_logical_name": ent.get("LogicalName"), "entity_set_name": ent.get("EntitySetName"), "metadata_id": ent.get("MetadataId"), "columns_created": [], @@ -1254,8 +1254,8 @@ def _create_table( ) return { - "entity_schema": table_schema_name, - "entity_logical_name": metadata.get("LogicalName"), + "table_schema_name": table_schema_name, + "table_logical_name": metadata.get("LogicalName"), "entity_set_name": metadata.get("EntitySetName"), "metadata_id": metadata.get("MetadataId"), "columns_created": created_cols, @@ -1277,8 +1277,6 @@ def _create_columns( subcode=ec.METADATA_TABLE_NOT_FOUND, ) - # Use the actual SchemaName from the entity metadata - entity_schema = ent.get("SchemaName") or table_schema_name metadata_id = ent.get("MetadataId") created: List[str] = [] needs_picklist_flush = False From 059118841ad51235d1ab881fd3efb519eb7a67ea Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 16:31:51 -0800 Subject: [PATCH 6/9] more naming updates --- README.md | 2 +- examples/advanced/walkthrough.py | 4 ++-- examples/basic/functional_testing.py | 2 +- src/PowerPlatform/Dataverse/client.py | 8 ++++---- src/PowerPlatform/Dataverse/data/odata.py | 16 ++++++++-------- tests/unit/data/test_logical_crud.py | 6 +++--- 6 files changed, 19 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index c9c0b39..69a9967 100644 --- a/README.md +++ b/README.md @@ -104,7 +104,7 @@ The SDK provides a simple, pythonic interface for Dataverse operations: | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic 
information | -| **Customization prefix values** | Custom tables and columns require a customization prefix values to be included for all operations (e.g., `"new_Title"`, not `"Title"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | +| **Customization prefix values** | Custom tables and columns require a customization prefix value to be included for all operations (e.g., `"new_Title"`, not `"Title"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | ## Examples diff --git a/examples/advanced/walkthrough.py b/examples/advanced/walkthrough.py index 315dbfc..5311592 100644 --- a/examples/advanced/walkthrough.py +++ b/examples/advanced/walkthrough.py @@ -80,14 +80,14 @@ def main(): print(f" Entity Set: {table_info.get('entity_set_name')}") else: log_call(f"client.create_table('{table_name}', schema={{...}})") - schema = { + columns = { "new_Title": "string", "new_Quantity": "int", "new_Amount": "decimal", "new_Completed": "bool", "new_Priority": Priority } - table_info = client.create_table(table_name, schema) + table_info = client.create_table(table_name, columns) print(f"✓ Created table: {table_info.get('table_schema_name')}") print(f" Columns created: {', '.join(table_info.get('columns_created', []))}") diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index 00c02ba..97a308d 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -96,7 +96,7 @@ def ensure_test_table(client: DataverseClient) -> Dict[str, Any]: table_info = client.create_table( table_schema_name, primary_column_schema_name="test_name", - schema= + columns= { "test_description": "string", # Description field "test_count": "int", # Integer field diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index 
87c41ef..ea2e21b 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -134,7 +134,7 @@ def create(self, table_schema_name: str, records: Union[Dict[str, Any], List[Dic print(f"Created {len(ids)} accounts") """ od = self._get_odata() - entity_set = od._entity_set_from_logical(table_schema_name) + entity_set = od._entity_set_from_schema_name(table_schema_name) if isinstance(records, dict): rid = od._create(entity_set, table_schema_name, records) # _create returns str on single input @@ -420,7 +420,7 @@ def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: def create_table( self, table_schema_name: str, - schema: Dict[str, Any], + columns: Dict[str, Any], solution_unique_name: Optional[str] = None, primary_column_schema_name: Optional[str] = None, ) -> Dict[str, Any]: @@ -491,7 +491,7 @@ class ItemStatus(IntEnum): """ return self._get_odata()._create_table( table_schema_name, - schema, + columns, solution_unique_name, primary_column_schema_name, ) @@ -662,7 +662,7 @@ def upload_file( ) """ od = self._get_odata() - entity_set = od._entity_set_from_logical(table_schema_name) + entity_set = od._entity_set_from_schema_name(table_schema_name) od.upload_file( entity_set, record_id, diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index c4c2b2f..a063ada 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -278,7 +278,7 @@ def _primary_id_attr(self, table_schema_name: str) -> str: if pid: return pid # Resolve metadata (populates _logical_primaryid_cache or raises if table_schema_name unknown) - self._entity_set_from_logical(table_schema_name) + self._entity_set_from_schema_name(table_schema_name) pid2 = self._logical_primaryid_cache.get(cache_key) if pid2: return pid2 @@ -303,7 +303,7 @@ def _update_by_ids(self, table_schema_name: str, ids: List[str], changes: Union[ if not ids: return None pk_attr = 
self._primary_id_attr(table_schema_name) - entity_set = self._entity_set_from_logical(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) if isinstance(changes, dict): batch = [{pk_attr: rid, **changes} for rid in ids] self._update_multiple(entity_set, table_schema_name, batch) @@ -420,7 +420,7 @@ def _update(self, table_schema_name: str, key: str, data: Dict[str, Any]) -> Non # Lowercase all keys to match Dataverse LogicalName expectations data = self._lowercase_keys(data) data = self._convert_labels_to_ints(table_schema_name, data) - entity_set = self._entity_set_from_logical(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("patch", url, headers={"If-Match": "*"}, json=data) @@ -481,7 +481,7 @@ def _update_multiple(self, entity_set: str, table_schema_name: str, records: Lis def _delete(self, table_schema_name: str, key: str) -> None: """Delete a record by GUID or alternate key.""" - entity_set = self._entity_set_from_logical(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" self._request("delete", url, headers={"If-Match": "*"}) @@ -501,7 +501,7 @@ def _get(self, table_schema_name: str, key: str, select: Optional[str] = None) - if select: # Lowercase column names for case-insensitive matching params["$select"] = select.lower() - entity_set = self._entity_set_from_logical(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) url = f"{self.api}/{entity_set}{self._format_key(key)}" r = self._request("get", url, params=params) return r.json() @@ -557,7 +557,7 @@ def _do_request(url: str, *, params: Optional[Dict[str, Any]] = None) -> Dict[st except ValueError: return {} - entity_set = self._entity_set_from_logical(table_schema_name) + entity_set = self._entity_set_from_schema_name(table_schema_name) 
base_url = f"{self.api}/{entity_set}" params: Dict[str, Any] = {} if select: @@ -627,7 +627,7 @@ def _query_sql(self, sql: str) -> list[dict[str, Any]]: # Extract logical table name via helper (robust to identifiers ending with 'from') logical = self._extract_logical_table(sql) - entity_set = self._entity_set_from_logical(logical) + entity_set = self._entity_set_from_schema_name(logical) # Issue GET /{entity_set}?sql= url = f"{self.api}/{entity_set}" params = {"sql": sql} @@ -666,7 +666,7 @@ def _extract_logical_table(sql: str) -> str: return m.group(1).lower() # ---------------------- Entity set resolution ----------------------- - def _entity_set_from_logical(self, logical: str) -> str: + def _entity_set_from_schema_name(self, logical: str) -> str: """Resolve entity set name (plural) from a logical (singular) name using metadata. Caches results for subsequent queries. Case-insensitive. diff --git a/tests/unit/data/test_logical_crud.py b/tests/unit/data/test_logical_crud.py index 78280d0..3d08653 100644 --- a/tests/unit/data/test_logical_crud.py +++ b/tests/unit/data/test_logical_crud.py @@ -77,7 +77,7 @@ def test_single_create_update_delete_get(): (204, {}, {}), # delete ] c = TestableClient(responses) - entity_set = c._entity_set_from_logical("account") + entity_set = c._entity_set_from_schema_name("account") rid = c._create(entity_set, "account", {"name": "Acme"}) assert rid == guid rec = c._get("account", rid, select="accountid,name") @@ -97,7 +97,7 @@ def test_bulk_create_and_update(): (204, {}, {}), # UpdateMultiple 1:1 ] c = TestableClient(responses) - entity_set = c._entity_set_from_logical("account") + entity_set = c._entity_set_from_schema_name("account") ids = c._create_multiple(entity_set, "account", [{"name": "A"}, {"name": "B"}]) assert ids == [g1, g2] c._update_by_ids("account", ids, {"statecode": 1}) # broadcast @@ -122,4 +122,4 @@ def test_unknown_logical_name_raises(): ] c = TestableClient(responses) with pytest.raises(MetadataError): - 
c._entity_set_from_logical("nonexistent") \ No newline at end of file + c._entity_set_from_schema_name("nonexistent") \ No newline at end of file From 7c2cd836e1b4af6f6a5e9954c50bee2d47fe36eb Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 16:38:00 -0800 Subject: [PATCH 7/9] couple more schema->columns renames --- README.md | 2 +- src/PowerPlatform/Dataverse/client.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 69a9967..eae6881 100644 --- a/README.md +++ b/README.md @@ -218,7 +218,7 @@ table_info = client.create_table("new_Product", { # Create with custom primary column name and solution assignment table_info = client.create_table( table_schema_name="new_Product", - schema={ + columns={ "new_Code": "string", "new_Price": "decimal" }, diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index ea2e21b..b5694ba 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -469,7 +469,7 @@ class ItemStatus(IntEnum): ACTIVE = 1 INACTIVE = 2 - schema = { + columns = { "new_Title": "string", # Note: includes 'new_' customization prefix value "new_Quantity": "int", "new_Price": "decimal", @@ -477,7 +477,7 @@ class ItemStatus(IntEnum): "new_Status": ItemStatus } - result = client.create_table("new_SampleItem", schema) + result = client.create_table("new_SampleItem", columns) print(f"Created table: {result['table_logical_name']}") print(f"Columns: {result['columns_created']}") From b69d8f268842a6da97bf16827282e9f4801b7acb Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Thu, 13 Nov 2025 21:23:46 -0800 Subject: [PATCH 8/9] more logicalName -> schemaName renaming --- README.md | 10 +++--- examples/advanced/file_upload.py | 4 +-- examples/basic/functional_testing.py | 24 ++++++------- src/PowerPlatform/Dataverse/client.py | 6 ++-- src/PowerPlatform/Dataverse/data/odata.py | 44 +++++++++++------------ 
tests/unit/data/test_logical_crud.py | 4 +-- 6 files changed, 46 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index eae6881..3030559 100644 --- a/README.md +++ b/README.md @@ -99,12 +99,12 @@ The SDK provides a simple, pythonic interface for Dataverse operations: | Concept | Description | |---------|-------------| | **DataverseClient** | Main entry point for all operations with environment connection | -| **Records** | Dataverse records represented as Python dictionaries with logical field names | -| **Logical Names** | Use table logical names (`"account"`) and column logical names (`"name"`) | +| **Records** | Dataverse records represented as Python dictionaries with column schema names | +| **Schema Names** | Use table schema names (`"account"`, `"new_MyTestTable"`) and column schema names (`"name"`, `"new_MyTestColumn"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic information | -| **Customization prefix values** | Custom tables and columns require a customization prefix value to be included for all operations (e.g., `"new_Title"`, not `"Title"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | +| **Customization prefix values** | Custom tables and columns require a customization prefix value to be included for all operations (e.g., `"new_MyTestTable"`, not `"MyTestTable"`). 
See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | ## Examples @@ -176,7 +176,7 @@ for record in results: print(record["name"]) # OData query with paging -# Note: filter and expand parameters require exact casing +# Note: filter and expand parameters are case sensitive pages = client.get( "account", select=["accountid", "name"], # select is case-insensitive (automatically lowercased) @@ -244,7 +244,7 @@ client.delete_table("new_Product") ```python # Upload a file to a record client.upload_file( - logical_name="account", + table_schema_name="account", record_id=account_id, file_name_attribute="new_document", path="/path/to/document.pdf" diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 527d421..798596e 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -187,7 +187,7 @@ def ensure_table(): sys.exit(1) entity_set = table_info.get("entity_set_name") -table_schema_name = table_info.get("table_schema_name") or entity_set.rstrip("s") +table_schema_name = table_info.get("table_schema_name") attr_prefix = table_schema_name.split('_',1)[0] if '_' in table_schema_name else table_schema_name name_attr = f"{attr_prefix}_name" small_file_attr_schema = f"{attr_prefix}_SmallDocument" # second file attribute for small single-request demo @@ -264,7 +264,7 @@ def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str) record_id = created_ids[0] else: raise RuntimeError("Unexpected create return; expected list[str] with at least one GUID") - print({"record_created": True, "id": record_id, "logical": table_schema_name}) + print({"record_created": True, "id": record_id, "table schema name": table_schema_name}) except Exception as e: # noqa: BLE001 print({"record_created": False, "error": str(e)}) sys.exit(1) diff --git a/examples/basic/functional_testing.py b/examples/basic/functional_testing.py index 
97a308d..ea4b727 100644 --- a/examples/basic/functional_testing.py +++ b/examples/basic/functional_testing.py @@ -123,9 +123,9 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s """Test record creation.""" print("\n📝 Record Creation Test") print("=" * 50) - - logical_name = table_info.get("table_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + + table_schema_name = table_info.get("table_schema_name") + attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name # Create test record data test_data = { @@ -139,7 +139,7 @@ def test_create_record(client: DataverseClient, table_info: Dict[str, Any]) -> s try: print("🚀 Creating test record...") - created_ids = client.create(logical_name, test_data) + created_ids = client.create(table_schema_name, test_data) if isinstance(created_ids, list) and created_ids: record_id = created_ids[0] @@ -163,12 +163,12 @@ def test_read_record(client: DataverseClient, table_info: Dict[str, Any], record print("\n📖 Record Reading Test") print("=" * 50) - logical_name = table_info.get("table_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + table_schema_name = table_info.get("table_schema_name") + attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name try: print(f"🔍 Reading record: {record_id}") - record = client.get(logical_name, record_id) + record = client.get(table_schema_name, record_id) if record: print("✅ Record retrieved successfully!") @@ -198,15 +198,15 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N print("\n🔍 Record Query Test") print("=" * 50) - logical_name = table_info.get("table_logical_name") - attr_prefix = logical_name.split("_", 1)[0] if "_" in logical_name else logical_name + table_schema_name = table_info.get("table_schema_name") + attr_prefix = 
table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name try: print("🔍 Querying records from test table...") # Query with filter and select records_iterator = client.get( - logical_name, + table_schema_name, select=[f"{attr_prefix}_name", f"{attr_prefix}_count", f"{attr_prefix}_amount"], filter=f"{attr_prefix}_is_active eq true", top=5, @@ -234,14 +234,14 @@ def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], recor print("\n🧹 Cleanup") print("=" * 50) - logical_name = table_info.get("table_logical_name") + table_schema_name = table_info.get("table_schema_name") # Ask user if they want to clean up cleanup_choice = input("Do you want to delete the test record? (y/N): ").strip().lower() if cleanup_choice in ['y', 'yes']: try: - client.delete(logical_name, record_id) + client.delete(table_schema_name, record_id) print("✅ Test record deleted successfully") except Exception as e: print(f"⚠️ Failed to delete test record: {e}") diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index b5694ba..b7e3934 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -108,7 +108,7 @@ def create(self, table_schema_name: str, records: Union[Dict[str, Any], List[Dic :param table_schema_name: Table schema name (e.g. ``"account"``, ``"contact"``, or ``"new_customtable"``). :type table_schema_name: str :param records: A single record dictionary or a list of record dictionaries. - Each dictionary should contain attribute logical names as keys. + Each dictionary should contain attribute schema names as keys. :type records: dict or list[dict] :return: List of created record GUIDs. Returns a single-element list for a single input. @@ -273,7 +273,7 @@ def get( :type table_schema_name: str :param record_id: Optional GUID to fetch a specific record. If None, queries multiple records. 
:type record_id: str or None - :param select: Optional list of attribute logical names to retrieve. Column names are + :param select: Optional list of column schema names to retrieve. Column names are case-insensitive and automatically lowercased (e.g. ``["new_Title", "new_Amount"]`` becomes ``"new_title,new_amount"``). :type select: list[str] or None @@ -478,7 +478,7 @@ class ItemStatus(IntEnum): } result = client.create_table("new_SampleItem", columns) - print(f"Created table: {result['table_logical_name']}") + print(f"Created table: {result['table_schema_name']}") print(f"Columns: {result['columns_created']}") Create a table with a custom primary column name:: diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index a063ada..3a1673a 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -169,7 +169,7 @@ def _create(self, entity_set: str, table_schema_name: str, record: Dict[str, Any table_schema_name : str Table schema name. record : dict[str, Any] - Attribute payload mapped by logical column names. + Attribute payload mapped by column schema names. Returns ------- @@ -212,11 +212,11 @@ def _create_multiple(self, entity_set: str, table_schema_name: str, records: Lis table_schema_name : str Table schema name. records : list[dict[str, Any]] - Payloads mapped by logical attribute names. + Payloads mapped by column schema names. Multi-create logical name resolution ------------------------------------ - - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.``. + - If any payload omits ``@odata.type`` the client stamps ``Microsoft.Dynamics.CRM.``. - If all payloads already include ``@odata.type`` no modification occurs. 
Returns @@ -666,22 +666,22 @@ def _extract_logical_table(sql: str) -> str: return m.group(1).lower() # ---------------------- Entity set resolution ----------------------- - def _entity_set_from_schema_name(self, logical: str) -> str: - """Resolve entity set name (plural) from a logical (singular) name using metadata. + def _entity_set_from_schema_name(self, table_schema_name: str) -> str: + """Resolve entity set name (plural) from a schema name (singular) name using metadata. Caches results for subsequent queries. Case-insensitive. """ - if not logical: - raise ValueError("logical name required") + if not table_schema_name: + raise ValueError("table schema name required") # Use normalized (lowercase) key for cache lookup - cache_key = self._normalize_cache_key(logical) + cache_key = self._normalize_cache_key(table_schema_name) cached = self._logical_to_entityset_cache.get(cache_key) if cached: return cached url = f"{self.api}/EntityDefinitions" # LogicalName in Dataverse is stored in lowercase, so we need to lowercase for the filter - logical_lower = logical.lower() + logical_lower = table_schema_name.lower() logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "LogicalName,EntitySetName,PrimaryIdAttribute", @@ -694,16 +694,16 @@ def _entity_set_from_schema_name(self, logical: str) -> str: except ValueError: items = [] if not items: - plural_hint = " (did you pass a plural entity set name instead of the singular logical name?)" if logical.endswith("s") and not logical.endswith("ss") else "" + plural_hint = " (did you pass a plural entity set name instead of the singular table schema name?)" if table_schema_name.endswith("s") and not table_schema_name.endswith("ss") else "" raise MetadataError( - f"Unable to resolve entity set for logical name '{logical}'. Provide the singular logical name.{plural_hint}", + f"Unable to resolve entity set for table schema name '{table_schema_name}'. 
Provide the singular table schema name.{plural_hint}", subcode=ec.METADATA_ENTITYSET_NOT_FOUND, ) md = items[0] es = md.get("EntitySetName") if not es: raise MetadataError( - f"Metadata response missing EntitySetName for logical '{logical}'.", + f"Metadata response missing EntitySetName for table schema name '{table_schema_name}'.", subcode=ec.METADATA_ENTITYSET_NAME_MISSING, ) self._logical_to_entityset_cache[cache_key] = es @@ -730,12 +730,12 @@ def _to_pascal(self, name: str) -> str: parts = re.split(r"[^A-Za-z0-9]+", name) return "".join(p[:1].upper() + p[1:] for p in parts if p) - def _get_entity_by_logical_name( + def _get_entity_by_table_schema_name( self, - logical_name: str, + table_schema_name: str, headers: Optional[Dict[str, str]] = None, ) -> Optional[Dict[str, Any]]: - """Get entity metadata by LogicalName. Case-insensitive. + """Get entity metadata by table schema name. Case-insensitive. Note: LogicalName is stored lowercase in Dataverse, so we lowercase the input for case-insensitive matching. The response includes SchemaName, LogicalName, @@ -743,7 +743,7 @@ def _get_entity_by_logical_name( """ url = f"{self.api}/EntityDefinitions" # LogicalName is stored lowercase, so we lowercase the input for lookup - logical_lower = logical_name.lower() + logical_lower = table_schema_name.lower() logical_escaped = self._escape_odata_quotes(logical_lower) params = { "$select": "MetadataId,LogicalName,SchemaName,EntitySetName", @@ -777,7 +777,7 @@ def _create_entity( if solution_unique_name: params = {"SolutionUniqueName": solution_unique_name} self._request("post", url, json=payload, params=params) - ent = self._get_entity_by_logical_name( + ent = self._get_entity_by_table_schema_name( table_schema_name, headers={"Consistency": "Strong"}, ) @@ -1171,7 +1171,7 @@ def _get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: dict | None Metadata summary or ``None`` if not found. 
""" - ent = self._get_entity_by_logical_name(table_schema_name) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent: return None return { @@ -1193,7 +1193,7 @@ def _list_tables(self) -> List[Dict[str, Any]]: def _delete_table(self, table_schema_name: str) -> None: """Delete a table by SchemaName. Case-insensitive.""" - ent = self._get_entity_by_logical_name(table_schema_name) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( f"Table '{table_schema_name}' not found.", @@ -1215,7 +1215,7 @@ def _create_table( The server will determine the LogicalName automatically (usually lowercased SchemaName). """ # Check if table already exists (case-insensitive) - ent = self._get_entity_by_logical_name(table_schema_name) + ent = self._get_entity_by_table_schema_name(table_schema_name) if ent: raise MetadataError( f"Table '{table_schema_name}' already exists.", @@ -1270,7 +1270,7 @@ def _create_columns( if not isinstance(columns, dict) or not columns: raise TypeError("columns must be a non-empty dict[name -> type]") - ent = self._get_entity_by_logical_name(table_schema_name) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( f"Table '{table_schema_name}' not found.", @@ -1316,7 +1316,7 @@ def _delete_columns( if not isinstance(name, str) or not name.strip(): raise ValueError("column names must be non-empty strings") - ent = self._get_entity_by_logical_name(table_schema_name) + ent = self._get_entity_by_table_schema_name(table_schema_name) if not ent or not ent.get("MetadataId"): raise MetadataError( f"Table '{table_schema_name}' not found.", diff --git a/tests/unit/data/test_logical_crud.py b/tests/unit/data/test_logical_crud.py index 3d08653..90b366f 100644 --- a/tests/unit/data/test_logical_crud.py +++ b/tests/unit/data/test_logical_crud.py @@ -38,7 +38,7 @@ class TestableClient(ODataClient): def 
__init__(self, responses): super().__init__(DummyAuth(), "https://org.example", None) self._http = DummyHTTPClient(responses) - def _convert_labels_to_ints(self, logical_name, record): # pragma: no cover - test shim + def _convert_labels_to_ints(self, table_schema_name, record): # pragma: no cover - test shim return record # Helper metadata response for logical name resolution @@ -116,7 +116,7 @@ def test_get_multiple_paging(): assert pages == [[{"accountid": "1"}], [{"accountid": "2"}]] -def test_unknown_logical_name_raises(): +def test_unknown_table_schema_name_raises(): responses = [ (200, {}, {"value": []}), # metadata lookup returns empty ] From d861410bda9b265b228520bd49c8ed519807444a Mon Sep 17 00:00:00 2001 From: Tim Pellissier Date: Fri, 14 Nov 2025 00:53:52 -0800 Subject: [PATCH 9/9] docstring merge --- README.md | 2 +- examples/advanced/file_upload.py | 1 - src/PowerPlatform/Dataverse/client.py | 26 +++++++++--------- src/PowerPlatform/Dataverse/data/odata.py | 32 +++++++++++------------ 4 files changed, 30 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index 3030559..17f3764 100644 --- a/README.md +++ b/README.md @@ -100,7 +100,7 @@ The SDK provides a simple, pythonic interface for Dataverse operations: |---------|-------------| | **DataverseClient** | Main entry point for all operations with environment connection | | **Records** | Dataverse records represented as Python dictionaries with column schema names | -| **Schema Names** | Use table schema names (`"account"`, `"new_MyTestTable"`) and column schema names (`"name"`, `"new_MyTestColumn"`). See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | +| **Schema names** | Use table schema names (`"account"`, `"new_MyTestTable"`) and column schema names (`"name"`, `"new_MyTestColumn"`). 
See: [Table definitions in Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/entity-metadata) | | **Bulk Operations** | Efficient bulk processing for multiple records with automatic optimization | | **Paging** | Automatic handling of large result sets with iterators | | **Structured Errors** | Detailed exception hierarchy with retry guidance and diagnostic information | diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py index 798596e..24a5851 100644 --- a/examples/advanced/file_upload.py +++ b/examples/advanced/file_upload.py @@ -166,7 +166,6 @@ def backoff(op, *, delays=(0,2,5,10), retry_status=(400,403,404,409,412,429,500, # --------------------------- Table ensure --------------------------- TABLE_SCHEMA_NAME = "new_FileSample" -# If user wants new customization prefix value / naming, adjust above. def ensure_table(): # Check by schema diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py index d6b1340..ef06d24 100644 --- a/src/PowerPlatform/Dataverse/client.py +++ b/src/PowerPlatform/Dataverse/client.py @@ -104,7 +104,7 @@ def create(self, table_schema_name: str, records: Union[Dict[str, Any], List[Dic """ Create one or more records by table name. - :param table_schema_name: Table schema name (e.g. ``"account"``, ``"contact"``, or ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"account"``, ``"contact"``, or ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param records: A single record dictionary or a list of record dictionaries. Each dictionary should contain column schema names as keys. @@ -157,7 +157,7 @@ def update(self, table_schema_name: str, ids: Union[str, List[str]], changes: Un 2. Broadcast update: ``update("account", [id1, id2], {"status": 1})`` - applies same changes to all IDs 3. 
Paired updates: ``update("account", [id1, id2], [changes1, changes2])`` - one-to-one mapping - :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param ids: Single GUID string or list of GUID strings to update. :type ids: ``str`` or ``list[str]`` @@ -210,7 +210,7 @@ def delete( """ Delete one or more records by GUID. - :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param ids: Single GUID string or list of GUID strings to delete. :type ids: ``str`` or ``list[str]`` @@ -266,19 +266,19 @@ def get( When ``record_id`` is provided, returns a single record dictionary. When ``record_id`` is None, returns a generator yielding batches of records. - :param table_schema_name: Table schema name (e.g. ``"account"`` or ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"account"`` or ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param record_id: Optional GUID to fetch a specific record. If None, queries multiple records. :type record_id: ``str`` or ``None`` :param select: Optional list of attribute logical names to retrieve. Column names are case-insensitive and automatically lowercased (e.g. ``["new_Title", "new_Amount"]`` becomes ``"new_title,new_amount"``). :type select: ``list[str]`` or ``None`` - :param filter: Optional OData filter string, e.g. ``"name eq 'Contoso'"`` or ``"new_quantity gt 5"``. **IMPORTANT: Column names in filter expressions must use exact lowercase logical names** (e.g. ``"new_quantity"``, not ``"new_Quantity"``). The filter string is passed directly to the Dataverse Web API without transformation. + :param filter: Optional OData filter string, e.g. 
``"name eq 'Contoso'"`` or ``"new_quantity gt 5"``. Column names in filter expressions must use exact lowercase logical names (e.g. ``"new_quantity"``, not ``"new_Quantity"``). The filter string is passed directly to the Dataverse Web API without transformation. :type filter: ``str`` or ``None`` :param orderby: Optional list of attributes to sort by, e.g. ``["name asc", "createdon desc"]``. Column names are automatically lowercased. :type orderby: ``list[str]`` or ``None`` :param top: Optional maximum number of records to return. :type top: ``int`` or ``None`` - :param expand: Optional list of navigation properties to expand, e.g. ``["primarycontactid"]``. **IMPORTANT: Navigation property names are case-sensitive and must match the server-defined names exactly.**. These are NOT automatically transformed. Consult entity metadata for correct casing. + :param expand: Optional list of navigation properties to expand, e.g. ``["primarycontactid"]``. Navigation property names are case-sensitive and must match the server-defined names exactly. These are NOT automatically transformed. Consult entity metadata for correct casing. :type expand: ``list[str]`` or ``None`` :param page_size: Optional number of records per page for pagination. :type page_size: ``int`` or ``None`` @@ -386,7 +386,7 @@ def get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """ Get basic metadata for a table if it exists. - :param table_schema_name: Table schema name (e.g. ``"new_MyTestTable"`` or ``"account"``). + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"`` or ``"account"``). :type table_schema_name: ``str`` :return: Dictionary containing table metadata with keys ``table_schema_name``, @@ -414,7 +414,7 @@ def create_table( """ Create a simple custom table with specified columns. - :param table_schema_name: Table schema name with customization prefix value (e.g. ``"new_MyTestTable"``). 
+ :param table_schema_name: Schema name of the table with customization prefix value (e.g. ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param columns: Dictionary mapping column names (with customization prefix value) to their types. All custom column names must include the customization prefix value (e.g. ``"new_Title"``). Supported types: @@ -431,7 +431,7 @@ class ItemStatus(IntEnum): 1036: {"Active": "Actif", "Inactive": "Inactif"} } - :type schema: dict[str, Any] + :type columns: dict[str, Any] :param solution_unique_name: Optional solution unique name that should own the new table. When omitted the table is created in the default solution. :type solution_unique_name: ``str`` or ``None`` :param primary_column_schema_name: Optional primary name column schema name with customization prefix value (e.g. ``"new_MyTestTable"``). If not provided, defaults to ``"{customization prefix value}_Name"``. @@ -483,7 +483,7 @@ def delete_table(self, table_schema_name: str) -> None: """ Delete a custom table by name. - :param table_schema_name: Table schema name (e.g. ``"new_MyTestTable"`` or ``"account"``). + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"`` or ``"account"``). :type table_schema_name: ``str`` :raises ~PowerPlatform.Dataverse.core.errors.MetadataError: If the table does not exist or deletion fails. @@ -523,7 +523,7 @@ def create_columns( """ Create one or more columns on an existing table using a schema-style mapping. - :param table_schema_name: Table schema name (e.g. ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param columns: Mapping of column schema names (with customization prefix value) to supported types. All custom column names must include the customization prefix value** (e.g. ``"new_Notes"``). Primitive types include ``string``, ``int``, ``decimal``, ``float``, ``datetime``, and ``bool``. 
Enum subclasses (IntEnum preferred) @@ -556,7 +556,7 @@ def delete_columns( """ Delete one or more columns from a table. - :param table_schema_name: Table schema name (e.g. ``"new_MyTestTable"``). + :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"``). :type table_schema_name: ``str`` :param columns: Column name or list of column names to remove. Must include customization prefix value (e.g. ``"new_TestColumn"``). :type columns: ``str`` | ``list[str]`` @@ -590,7 +590,7 @@ def upload_file( """ Upload a file to a Dataverse file column. - :param table_schema_name: Table schema name, e.g. ``"account"`` or ``"new_MyTestTable"``. + :param table_schema_name: Schema name of the table, e.g. ``"account"`` or ``"new_MyTestTable"``. :type table_schema_name: ``str`` :param record_id: GUID of the target record. :type record_id: ``str`` diff --git a/src/PowerPlatform/Dataverse/data/odata.py b/src/PowerPlatform/Dataverse/data/odata.py index 7c3baa0..33d9db5 100644 --- a/src/PowerPlatform/Dataverse/data/odata.py +++ b/src/PowerPlatform/Dataverse/data/odata.py @@ -178,7 +178,7 @@ def _create(self, entity_set: str, table_schema_name: str, record: Dict[str, Any :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param record: Attribute payload mapped by logical column names. :type record: ``dict[str, Any]`` @@ -215,7 +215,7 @@ def _create_multiple(self, entity_set: str, table_schema_name: str, records: Lis :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param records: Payload dictionaries mapped by column schema names. 
:type records: ``list[dict[str, Any]]`` @@ -291,7 +291,7 @@ def _primary_id_attr(self, table_schema_name: str) -> str: def _update_by_ids(self, table_schema_name: str, ids: List[str], changes: Union[Dict[str, Any], List[Dict[str, Any]]]) -> None: """Update many records by GUID list using the collection-bound ``UpdateMultiple`` action. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param ids: GUIDs of target records. :type ids: ``list[str]`` @@ -413,7 +413,7 @@ def esc(match): def _update(self, table_schema_name: str, key: str, data: Dict[str, Any]) -> None: """Update an existing record by GUID. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param key: Record GUID (with or without parentheses). :type key: ``str`` @@ -434,7 +434,7 @@ def _update_multiple(self, entity_set: str, table_schema_name: str, records: Lis :param entity_set: Resolved entity set (plural) name. :type entity_set: ``str`` - :param table_schema_name: Table schema name, e.g. "new_MyTestTable". + :param table_schema_name: Schema name of the table, e.g. "new_MyTestTable". :type table_schema_name: ``str`` :param records: List of patch dictionaries. Each must include the true primary key attribute (e.g. ``accountid``) and one or more fields to update. :type records: ``list[dict[str, Any]]`` @@ -475,7 +475,7 @@ def _update_multiple(self, entity_set: str, table_schema_name: str, records: Lis def _delete(self, table_schema_name: str, key: str) -> None: """Delete a record by GUID. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. 
:type table_schema_name: ``str`` :param key: Record GUID (with or without parentheses) :type key: ``str`` @@ -490,7 +490,7 @@ def _delete(self, table_schema_name: str, key: str) -> None: def _get(self, table_schema_name: str, key: str, select: Optional[str] = None) -> Dict[str, Any]: """Retrieve a single record. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param key: Record GUID (with or without parentheses). :type key: ``str`` @@ -521,17 +521,17 @@ def _get_multiple( ) -> Iterable[List[Dict[str, Any]]]: """Iterate records from an entity set, yielding one page (list of dicts) at a time. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param select: Columns to include (``$select``) or ``None``. Column names are automatically lowercased. :type select: ``list[str]`` | ``None`` - :param filter: OData ``$filter`` expression or ``None``. IMPORTANT: This is passed as-is without transformation. Users must provide lowercase logical column names (e.g., "statecode eq 0"). + :param filter: OData ``$filter`` expression or ``None``. This is passed as-is without transformation. Users must provide lowercase logical column names (e.g., "statecode eq 0"). :type filter: ``str`` | ``None`` :param orderby: Order expressions (``$orderby``) or ``None``. Column names are automatically lowercased. :type orderby: ``list[str]`` | ``None`` :param top: Max total records (applied on first request as ``$top``) or ``None``. :type top: ``int`` | ``None`` - :param expand: Navigation properties to expand (``$expand``) or ``None``. IMPORTANT: These are case-sensitive and passed as-is. Users must provide exact navigation property names from entity metadata. + :param expand: Navigation properties to expand (``$expand``) or ``None``. These are case-sensitive and passed as-is. 
Users must provide exact navigation property names from entity metadata. :type expand: ``list[str]`` | ``None`` :param page_size: Per-page size hint via ``Prefer: odata.maxpagesize``. :type page_size: ``int`` | ``None`` @@ -1147,7 +1147,7 @@ def _attribute_payload(self, column_schema_name: str, dtype: Any, *, is_primary_ def _get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]: """Return basic metadata for a custom table if it exists. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :return: Metadata summary or ``None`` if not found. @@ -1180,9 +1180,9 @@ def _list_tables(self) -> List[Dict[str, Any]]: return r.json().get("value", []) def _delete_table(self, table_schema_name: str) -> None: - """Delete a table by SchemaName. Case-insensitive. + """Delete a table by schema name. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :return: ``None`` @@ -1210,7 +1210,7 @@ def _create_table( ) -> Dict[str, Any]: """Create a custom table with specified columns. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param schema: Mapping of column name -> type spec (``str`` or ``Enum`` subclass). :type schema: ``dict[str, Any]`` @@ -1281,7 +1281,7 @@ def _create_columns( ) -> List[str]: """Create new columns on an existing table. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. :type table_schema_name: ``str`` :param columns: Mapping of column schema name -> type spec (``str`` or ``Enum`` subclass). :type columns: ``dict[str, Any]`` @@ -1333,7 +1333,7 @@ def _delete_columns( ) -> List[str]: """Delete one or more columns from a table. - :param table_schema_name: Table schema name. + :param table_schema_name: Schema name of the table. 
:type table_schema_name: ``str`` :param columns: Single column name or list of column names :type columns: ``str`` | ``list[str]``