diff --git a/.claude/skills/dataverse-sdk/SKILL.md b/.claude/skills/dataverse-sdk/SKILL.md
index 142f452..f611fd8 100644
--- a/.claude/skills/dataverse-sdk/SKILL.md
+++ b/.claude/skills/dataverse-sdk/SKILL.md
@@ -159,6 +159,7 @@ Types on the same line map to the same exact format under the hood
 - `"float"` or `"double"` - Floating point number
 - `"bool"` or `"boolean"` - Yes/No
 - `"datetime"` or `"date"` - Date
+- `"file"` - File column
 - Enum subclass - Local option set (picklist)
 
 #### Manage Columns
@@ -199,7 +200,7 @@ client.delete_table("new_Product")
 client.upload_file(
     table_schema_name="account",
     record_id=account_id,
-    file_name_attribute="new_document",
+    file_name_attribute="new_Document",  # If the file column doesn't exist, it will be created automatically
     path="/path/to/document.pdf"
 )
 ```
diff --git a/README.md b/README.md
index cd959f1..7415c7a 100644
--- a/README.md
+++ b/README.md
@@ -269,7 +269,7 @@ client.delete_table("new_Product")
 client.upload_file(
     table_schema_name="account",
     record_id=account_id,
-    file_name_attribute="new_document",
+    file_name_attribute="new_Document",  # If the file column doesn't exist, it will be created automatically
     path="/path/to/document.pdf"
 )
 ```
diff --git a/examples/advanced/file_upload.py b/examples/advanced/file_upload.py
index d3499b7..ca75fb8 100644
--- a/examples/advanced/file_upload.py
+++ b/examples/advanced/file_upload.py
@@ -48,12 +48,12 @@
 run_small = mode_int in (1, 3)
 run_chunk = mode_int in (2, 3)
 
-delete_table_choice = input("Delete the table at end? (y/N): ").strip() or "n"
-cleanup_table = delete_table_choice.lower() in ("y", "yes", "true", "1")
-
 delete_record_choice = input("Delete the created record at end? (Y/n): ").strip() or "y"
 cleanup_record = delete_record_choice.lower() in ("y", "yes", "true", "1")
 
+delete_table_choice = input("Delete the table at end? (y/N): ").strip() or "n"
+cleanup_table = delete_table_choice.lower() in ("y", "yes", "true", "1")
+
 credential = InteractiveBrowserCredential()
 client = DataverseClient(base_url=base_url, credential=credential)
 
@@ -192,7 +192,7 @@ def ensure_table():
     if existing:
         print({"table": TABLE_SCHEMA_NAME, "existed": True})
         return existing
-    log("client.create_table('new_FileSample', schema={'new_Title': 'string'})")
+    log(f"client.create_table('{TABLE_SCHEMA_NAME}', schema={{'new_Title': 'string'}})")
     info = backoff(lambda: client.create_table(TABLE_SCHEMA_NAME, {"new_Title": "string"}))
     print({"table": TABLE_SCHEMA_NAME, "existed": False, "metadata_id": info.get("metadata_id")})
     return info
@@ -210,102 +210,7 @@ def ensure_table():
 attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
 name_attr = f"{attr_prefix}_name"
 small_file_attr_schema = f"{attr_prefix}_SmallDocument"  # second file attribute for small single-request demo
-small_file_attr_logical = f"{attr_prefix}_smalldocument"  # expected logical name (lowercase)
 chunk_file_attr_schema = f"{attr_prefix}_ChunkDocument"  # attribute for streaming chunk upload demo
-chunk_file_attr_logical = f"{attr_prefix}_chunkdocument"  # expected logical name
-
-
-def ensure_file_attribute_generic(schema_name: str, label: str, key_prefix: str):
-    meta_id = table_info.get("metadata_id")
-    if not meta_id:
-        print({f"{key_prefix}_attribute": "skipped", "reason": "missing metadata_id"})
-        return False
-    odata = client._get_odata()
-    # Probe existing
-    try:
-        url = (
-            f"{odata.api}/EntityDefinitions({meta_id})/Attributes?$select=SchemaName&$filter="
-            f"SchemaName eq '{schema_name}'"
-        )
-        r = backoff(lambda: odata._request("get", url), delays=ATTRIBUTE_VISIBILITY_DELAYS)
-        val = []
-        try:
-            val = r.json().get("value", [])
-        except Exception:  # noqa: BLE001
-            pass
-        if any(a.get("SchemaName") == schema_name for a in val if isinstance(a, dict)):
-            return True
-    except Exception as ex:  # noqa: BLE001
-        print({f"{key_prefix}_file_attr_probe_error": str(ex)})
-
-    payload = {
-        "@odata.type": "Microsoft.Dynamics.CRM.FileAttributeMetadata",
-        "SchemaName": schema_name,
-        "DisplayName": {
-            "@odata.type": "Microsoft.Dynamics.CRM.Label",
-            "LocalizedLabels": [
-                {
-                    "@odata.type": "Microsoft.Dynamics.CRM.LocalizedLabel",
-                    "Label": label,
-                    "LanguageCode": int(client._config.language_code),
-                }
-            ],
-        },
-        "RequiredLevel": {"Value": "None"},
-    }
-    try:
-        url = f"{odata.api}/EntityDefinitions({meta_id})/Attributes"
-        backoff(lambda: odata._request("post", url, json=payload), delays=ATTRIBUTE_VISIBILITY_DELAYS)
-        print({f"{key_prefix}_file_attribute_created": True})
-        time.sleep(2)
-        return True
-    except Exception as ex:  # noqa: BLE001
-        resp = getattr(ex, "response", None)
-        body_l = None
-        try:
-            body_l = resp.text.lower() if getattr(resp, "text", None) else None
-        except Exception:  # noqa: BLE001
-            pass
-        if body_l and ("duplicate" in body_l or "exists" in body_l):
-            print({f"{key_prefix}_file_attribute_created": False, "reason": "already exists (race)"})
-            return True
-        print({f"{key_prefix}_file_attribute_created": False, "error": str(ex)})
-        return False
-
-
-def wait_for_attribute_visibility(logical_name: str, label: str):
-    if not logical_name or not entity_set:
-        return False
-    odata = client._get_odata()
-    probe_url = f"{odata.api}/{entity_set}?$top=1&$select={logical_name}"
-    waited = 0
-    last_error = None
-    for delay in ATTRIBUTE_VISIBILITY_DELAYS:
-        if delay:
-            time.sleep(delay)
-            waited += delay
-        try:
-            resp = odata._request("get", probe_url)
-            try:
-                resp.json()
-            except Exception:  # noqa: BLE001
-                pass
-            if waited:
-                print({f"{label}_attribute_visible_wait_seconds": waited})
-            return True
-        except Exception as ex:  # noqa: BLE001
-            last_error = ex
-            continue
-    raise RuntimeError(f"Timed out waiting for attribute '{logical_name}' to materialize") from last_error
-
-
-# Conditionally ensure each attribute only if its mode is selected
-if run_small:
-    ensure_file_attribute_generic(small_file_attr_schema, "Small Document", "small")
-    wait_for_attribute_visibility(small_file_attr_logical, "small")
-if run_chunk:
-    ensure_file_attribute_generic(chunk_file_attr_schema, "Chunk Document", "chunk")
-    wait_for_attribute_visibility(chunk_file_attr_logical, "chunk")
 
 # --------------------------- Record create ---------------------------
 record_id = None
@@ -353,7 +258,7 @@ def get_dataset_info(file_path: Path):
         lambda: client.upload_file(
             table_schema_name,
             record_id,
-            small_file_attr_logical,
+            small_file_attr_schema,
             str(DATASET_FILE),
             mode="small",
         )
@@ -361,7 +266,7 @@ def get_dataset_info(file_path: Path):
     print({"small_upload_completed": True, "small_source_size": small_file_size})
     odata = client._get_odata()
     dl_url_single = (
-        f"{odata.api}/{entity_set}({record_id})/{small_file_attr_logical}/$value"  # raw entity_set URL OK
+        f"{odata.api}/{entity_set}({record_id})/{small_file_attr_schema.lower()}/$value"  # raw entity_set URL OK
     )
     resp_single = backoff(lambda: odata._request("get", dl_url_single))
     content_single = resp_single.content or b""
@@ -387,7 +292,7 @@ def get_dataset_info(file_path: Path):
         lambda: client.upload_file(
            table_schema_name,
             record_id,
-            small_file_attr_logical,
+            small_file_attr_schema,
             str(replacement_file),
             mode="small",
         )
@@ -425,7 +330,7 @@ def get_dataset_info(file_path: Path):
         lambda: client.upload_file(
             table_schema_name,
             record_id,
-            chunk_file_attr_logical,
+            chunk_file_attr_schema,
             str(DATASET_FILE),
             mode="chunk",
         )
@@ -433,7 +338,7 @@ def get_dataset_info(file_path: Path):
     print({"chunk_upload_completed": True})
     odata = client._get_odata()
     dl_url_chunk = (
-        f"{odata.api}/{entity_set}({record_id})/{chunk_file_attr_logical}/$value"  # raw entity_set for download
+        f"{odata.api}/{entity_set}({record_id})/{chunk_file_attr_schema.lower()}/$value"  # raw entity_set for download
     )
     resp_chunk = backoff(lambda: odata._request("get", dl_url_chunk))
     content_chunk = resp_chunk.content or b""
@@ -458,7 +363,7 @@ def get_dataset_info(file_path: Path):
         lambda: client.upload_file(
             table_schema_name,
             record_id,
-            chunk_file_attr_logical,
+            chunk_file_attr_schema,
             str(replacement_file),
             mode="chunk",
         )
diff --git a/src/PowerPlatform/Dataverse/claude_skill/SKILL.md b/src/PowerPlatform/Dataverse/claude_skill/SKILL.md
index 142f452..f611fd8 100644
--- a/src/PowerPlatform/Dataverse/claude_skill/SKILL.md
+++ b/src/PowerPlatform/Dataverse/claude_skill/SKILL.md
@@ -159,6 +159,7 @@ Types on the same line map to the same exact format under the hood
 - `"float"` or `"double"` - Floating point number
 - `"bool"` or `"boolean"` - Yes/No
 - `"datetime"` or `"date"` - Date
+- `"file"` - File column
 - Enum subclass - Local option set (picklist)
 
 #### Manage Columns
@@ -199,7 +200,7 @@ client.delete_table("new_Product")
 client.upload_file(
     table_schema_name="account",
     record_id=account_id,
-    file_name_attribute="new_document",
+    file_name_attribute="new_Document",  # If the file column doesn't exist, it will be created automatically
     path="/path/to/document.pdf"
 )
 ```
diff --git a/src/PowerPlatform/Dataverse/client.py b/src/PowerPlatform/Dataverse/client.py
index 84bd5d4..bf425b0 100644
--- a/src/PowerPlatform/Dataverse/client.py
+++ b/src/PowerPlatform/Dataverse/client.py
@@ -436,7 +436,7 @@ def create_table(
         :param columns: Dictionary mapping column names (with customization prefix value) to their types.
             All custom column names must include the customization prefix value (e.g. ``"new_Title"``). Supported types:
 
-            - Primitive types: ``"string"`` (alias: ``"text"``), ``"int"`` (alias: ``"integer"``), ``"decimal"`` (alias: ``"money"``), ``"float"`` (alias: ``"double"``), ``"datetime"`` (alias: ``"date"``), ``"bool"`` (alias: ``"boolean"``)
+            - Primitive types: ``"string"`` (alias: ``"text"``), ``"int"`` (alias: ``"integer"``), ``"decimal"`` (alias: ``"money"``), ``"float"`` (alias: ``"double"``), ``"datetime"`` (alias: ``"date"``), ``"bool"`` (alias: ``"boolean"``), and ``"file"``
             - Enum subclass (IntEnum preferred): Creates a local option set. Optional multilingual labels
               can be provided via ``__labels__`` class attribute, defined inside the Enum subclass::
 
@@ -546,22 +546,23 @@ def create_columns(
         :param table_schema_name: Schema name of the table (e.g. ``"new_MyTestTable"``).
         :type table_schema_name: :class:`str`
         :param columns: Mapping of column schema names (with customization prefix value) to supported types.
             All custom column names must include the customization prefix value** (e.g. ``"new_Notes"``). Primitive types include
-            ``"string"`` (alias: ``"text"``), ``"int"`` (alias: ``"integer"``), ``"decimal"`` (alias: ``"money"``), ``"float"`` (alias: ``"double"``), ``"datetime"`` (alias: ``"date"``), and ``"bool"`` (alias: ``"boolean"``). Enum subclasses (IntEnum preferred)
+            ``"string"`` (alias: ``"text"``), ``"int"`` (alias: ``"integer"``), ``"decimal"`` (alias: ``"money"``), ``"float"`` (alias: ``"double"``), ``"datetime"`` (alias: ``"date"``), ``"bool"`` (alias: ``"boolean"``), and ``"file"``. Enum subclasses (IntEnum preferred)
             generate a local option set and can specify localized labels via ``__labels__``.
         :type columns: :class:`dict` mapping :class:`str` to :class:`typing.Any`
         :returns: Schema names for the columns that were created.
         :rtype: :class:`list` of :class:`str`
 
         Example:
-            Create two columns on the custom table::
+            Create multiple columns on the custom table::
 
                 created = client.create_columns(
                     "new_MyTestTable",
                     {
                         "new_Scratch": "string",
                         "new_Flags": "bool",
+                        "new_Document": "file",
                     },
                 )
-                print(created)  # ['new_Scratch', 'new_Flags']
+                print(created)  # ['new_Scratch', 'new_Flags', 'new_Document']
         """
         with self._scoped_odata() as od:
             return od._create_columns(
@@ -616,7 +617,7 @@ def upload_file(
         :type table_schema_name: :class:`str`
         :param record_id: GUID of the target record.
         :type record_id: :class:`str`
-        :param file_name_attribute: Logical name of the file column attribute.
+        :param file_name_attribute: Schema name of the file column attribute (e.g., ``"new_Document"``). If the column doesn't exist, it will be created automatically.
         :type file_name_attribute: :class:`str`
         :param path: Local filesystem path to the file. The stored filename will be the basename of this path.
@@ -645,7 +646,7 @@ def upload_file(
             client.upload_file(
                 table_schema_name="account",
                 record_id=account_id,
-                file_name_attribute="new_contract",
+                file_name_attribute="new_Contract",
                 path="/path/to/contract.pdf",
                 mime_type="application/pdf"
             )
@@ -655,15 +656,14 @@ def upload_file(
             client.upload_file(
                 table_schema_name="email",
                 record_id=email_id,
-                file_name_attribute="new_attachment",
+                file_name_attribute="new_Attachment",
                 path="/path/to/large_file.zip",
                 mode="auto"
             )
         """
         with self._scoped_odata() as od:
-            entity_set = od._entity_set_from_schema_name(table_schema_name)
             od._upload_file(
-                entity_set,
+                table_schema_name,
                 record_id,
                 file_name_attribute,
                 path,
diff --git a/src/PowerPlatform/Dataverse/data/_odata.py b/src/PowerPlatform/Dataverse/data/_odata.py
index 7c5fc6c..aa0b0ee 100644
--- a/src/PowerPlatform/Dataverse/data/_odata.py
+++ b/src/PowerPlatform/Dataverse/data/_odata.py
@@ -865,10 +865,12 @@ def _create_entity(
     def _get_attribute_metadata(
         self,
         entity_metadata_id: str,
-        column_schema_name: str,
+        column_name: str,
         extra_select: Optional[str] = None,
     ) -> Optional[Dict[str, Any]]:
-        attr_escaped = self._escape_odata_quotes(column_schema_name)
+        # Convert to lowercase logical name for lookup
+        logical_name = column_name.lower()
+        attr_escaped = self._escape_odata_quotes(logical_name)
         url = f"{self.api}/EntityDefinitions({entity_metadata_id})/Attributes"
         select_fields = ["MetadataId", "LogicalName", "SchemaName"]
         if extra_select:
@@ -882,7 +884,7 @@ def _get_attribute_metadata(
                 select_fields.append(piece)
         params = {
             "$select": ",".join(select_fields),
-            "$filter": f"SchemaName eq '{attr_escaped}'",
+            "$filter": f"LogicalName eq '{attr_escaped}'",
         }
         r = self._request("get", url, params=params)
         try:
@@ -896,6 +898,40 @@ def _get_attribute_metadata(
                 return item
         return None
 
+    def _wait_for_attribute_visibility(
+        self,
+        entity_set: str,
+        attribute_name: str,
+        delays: tuple = (0, 3, 10, 20),
+    ) -> None:
+        """Wait for a newly created attribute to become visible in the data API.
+
+        After creating an attribute via the metadata API, there can be a delay before
+        it becomes queryable in the data API. This method polls the entity set with
+        the attribute in the $select clause until it succeeds or all delays are exhausted.
+        """
+        # Convert to lowercase logical name for URL
+        logical_name = attribute_name.lower()
+        probe_url = f"{self.api}/{entity_set}?$top=1&$select={logical_name}"
+        last_error = None
+        total_wait = sum(delays)
+
+        for delay in delays:
+            if delay:
+                time.sleep(delay)
+            try:
+                self._request("get", probe_url)
+                return
+            except Exception as ex:
+                last_error = ex
+                continue
+
+        # All retries exhausted - raise with context
+        raise RuntimeError(
+            f"Attribute '{logical_name}' did not become visible in the data API "
+            f"after {total_wait} seconds (exhausted all retries)."
+        ) from last_error
+
     # ---------------------- Enum / Option Set helpers ------------------
     def _build_localizedlabels_payload(self, translations: Dict[int, str]) -> Dict[str, Any]:
         """Build a Dataverse Label object from {: } entries.
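The new `_wait_for_attribute_visibility` above is a fixed-schedule poll. A sketch of the same shape in isolation, with the OData probe abstracted into a callable so the timing is easy to see; the helper name and signature here are illustrative, not library API.

```python
import time
from typing import Callable, Tuple

def wait_until_visible(probe: Callable[[], None], delays: Tuple[int, ...] = (0, 3, 10, 20)) -> None:
    """Poll `probe` on a fixed delay schedule; worst case waits sum(delays) == 33 seconds."""
    last_error = None
    for delay in delays:
        if delay:
            time.sleep(delay)  # back off before the next probe
        try:
            probe()  # stands in for GET {api}/{entity_set}?$top=1&$select={logical_name}
            return   # first successful probe ends the wait
        except Exception as ex:
            last_error = ex
            continue
    raise RuntimeError("attribute never became visible") from last_error
```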
@@ -1239,6 +1275,13 @@ def _attribute_payload(
                 "IsGlobal": False,
             },
         }
+        if dtype_l == "file":
+            return {
+                "@odata.type": "Microsoft.Dynamics.CRM.FileAttributeMetadata",
+                "SchemaName": column_schema_name,
+                "DisplayName": self._label(label),
+                "RequiredLevel": {"Value": "None"},
+            }
         return None
 
     def _get_table_info(self, table_schema_name: str) -> Optional[Dict[str, Any]]:
diff --git a/src/PowerPlatform/Dataverse/data/_upload.py b/src/PowerPlatform/Dataverse/data/_upload.py
index d82efb5..8f0da24 100644
--- a/src/PowerPlatform/Dataverse/data/_upload.py
+++ b/src/PowerPlatform/Dataverse/data/_upload.py
@@ -13,7 +13,7 @@ class _ODataFileUpload:
 
     def _upload_file(
         self,
-        entity_set: str,
+        table_schema_name: str,
         record_id: str,
         file_name_attribute: str,
         path: str,
@@ -25,12 +25,12 @@ def _upload_file(
 
         Parameters
         ----------
-        entity_set : :class:`str`
-            Target entity set (plural logical name), e.g. "accounts".
+        table_schema_name : :class:`str`
+            Table schema name, e.g. "account" or "new_MyTestTable".
         record_id : :class:`str`
             GUID of the target record.
         file_name_attribute : :class:`str`
-            Logical name of the file column attribute
+            Schema name of the file column attribute (e.g., "new_Document"). If the column doesn't exist, it will be created.
         path : :class:`str`
             Local filesystem path to the file.
         mode : :class:`str` | None
@@ -42,6 +42,22 @@ def _upload_file(
         """
         import os
 
+        # Resolve entity set from table schema name
+        entity_set = self._entity_set_from_schema_name(table_schema_name)
+
+        # Check if the file column exists, create it if it doesn't
+        entity_metadata = self._get_entity_by_table_schema_name(table_schema_name)
+        if entity_metadata:
+            metadata_id = entity_metadata.get("MetadataId")
+            if metadata_id:
+                attr_metadata = self._get_attribute_metadata(metadata_id, file_name_attribute)
+                if not attr_metadata:
+                    # Attribute doesn't exist, create it
+                    self._create_columns(table_schema_name, {file_name_attribute: "file"})
+                    # Wait for the attribute to become visible in the data API
+                    # Raises RuntimeError with underlying exception if timeout occurs
+                    self._wait_for_attribute_visibility(entity_set, file_name_attribute)
+
         mode = (mode or "auto").lower()
 
         if mode == "auto":
@@ -50,14 +66,15 @@ def _upload_file(
             size = os.path.getsize(path)
             mode = "small" if size < 128 * 1024 * 1024 else "chunk"
 
+        # Convert schema name to lowercase logical name for URL usage
+        logical_name = file_name_attribute.lower()
+
         if mode == "small":
             return self._upload_file_small(
-                entity_set, record_id, file_name_attribute, path, content_type=mime_type, if_none_match=if_none_match
+                entity_set, record_id, logical_name, path, content_type=mime_type, if_none_match=if_none_match
             )
         if mode == "chunk":
-            return self._upload_file_chunk(
-                entity_set, record_id, file_name_attribute, path, if_none_match=if_none_match
-            )
+            return self._upload_file_chunk(entity_set, record_id, logical_name, path, if_none_match=if_none_match)
         raise ValueError(f"Invalid mode '{mode}'. Use 'auto', 'small', or 'chunk'.")
 
     def _upload_file_small(
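For context, a condensed sketch of the two conventions `_upload_file` now centralizes: size-based mode selection and lowercase logical names in `$value` URLs (the same URL shape the example script uses for its download checks). The helper names and `api_base` parameter are illustrative, not part of the module.

```python
import os

SMALL_UPLOAD_LIMIT = 128 * 1024 * 1024  # below 128 MiB the single-request path is used

def pick_mode(path: str, mode: str = "auto") -> str:
    # Mirrors the auto/small/chunk decision shown in the hunk above.
    mode = (mode or "auto").lower()
    if mode == "auto":
        mode = "small" if os.path.getsize(path) < SMALL_UPLOAD_LIMIT else "chunk"
    if mode not in ("small", "chunk"):
        raise ValueError(f"Invalid mode '{mode}'. Use 'auto', 'small', or 'chunk'.")
    return mode

def file_value_url(api_base: str, entity_set: str, record_id: str, file_name_attribute: str) -> str:
    # Callers may pass the schema name (e.g. "new_Document"); the URL wants the lowercase logical name.
    return f"{api_base}/{entity_set}({record_id})/{file_name_attribute.lower()}/$value"
```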