diff --git a/src/albert/__init__.py b/src/albert/__init__.py index e900d504..a2a2f481 100644 --- a/src/albert/__init__.py +++ b/src/albert/__init__.py @@ -4,4 +4,4 @@ __all__ = ["Albert", "AlbertClientCredentials", "AlbertSSOClient"] -__version__ = "1.5.5" +__version__ = "1.6.6" diff --git a/src/albert/collections/companies.py b/src/albert/collections/companies.py index c41c0d21..cc1a0ef2 100644 --- a/src/albert/collections/companies.py +++ b/src/albert/collections/companies.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Iterator from pydantic import validate_call @@ -249,3 +251,46 @@ def update(self, *, company: Company) -> Company: self.session.patch(url, json=patch_payload.model_dump(mode="json", by_alias=True)) updated_company = self.get_by_id(id=company.id) return updated_company + + def merge(self, *, parent_id: str, child_ids: str | list[str]) -> Company: + """ + Merge one or more child companies into a single parent company. + + Parameters + ---------- + parent_id : str + The ID of the company that will remain as the parent. + child_ids : Union[str, List[str]] + A single company ID or a list of company IDs to merge into the parent. + + Returns + ------- + Company + The updated parent Company object. + """ + # allow passing a single ID as a string + if isinstance(child_ids, str): + child_ids = [child_ids] + + if not child_ids: + msg = "At least one child company ID must be provided for merge." + logger.error(msg) + raise AlbertException(msg) + + payload = { + "parentId": parent_id, + "ChildCompanies": [{"id": child_id} for child_id in child_ids], + } + + url = f"{self.base_path}/merge" + response = self.session.post(url, json=payload) + if response.status_code == 206: + msg = "Merge returned partial content (206). Check that all ACLs are valid." 
+ logger.error(msg) + raise AlbertException(msg) + response.raise_for_status() + + try: + return Company(**response.json()) + except (ValueError, TypeError): + return self.get_by_id(id=parent_id) diff --git a/src/albert/collections/custom_templates.py b/src/albert/collections/custom_templates.py index d925e45d..878767af 100644 --- a/src/albert/collections/custom_templates.py +++ b/src/albert/collections/custom_templates.py @@ -88,6 +88,44 @@ def search( ], ) + def create(self, *, custom_template: list[CustomTemplate]) -> list[CustomTemplate]: + """Creates a new custom template. + + Parameters + ---------- + custom_template : CustomTemplate + The custom template to create. + + Returns + ------- + CustomTemplate + The created CustomTemplate object. + """ + + response = self.session.post( + url=self.base_path, + json=[ + custom_template.model_dump( + mode="json", by_alias=True, exclude_unset=True, exclude_none=True + ) + ], + ) + obj = response.json()[0] + tags = (obj.get("Data")).get("Tags") or [] + + def _resolve_tags(tid: str) -> dict: + r = self.session.get(url=f"/api/v3/tags/{tid}") + if r.ok: + d = r.json() + item = (d.get("Items") or [d])[0] if isinstance(d, dict) and "Items" in d else d + return {"albertId": item.get("albertId", tid), "name": item.get("name")} + return {"albertId": tid} + + if tags: + obj["Data"]["Tags"] = [_resolve_tags(t.get("id")) for t in tags] + + return CustomTemplate(**obj) + def get_all( self, *, @@ -120,3 +158,20 @@ def get_all( yield self.get_by_id(id=item.id) except AlbertHTTPError as e: logger.warning(f"Error hydrating custom template {item.id}: {e}") + + def delete(self, *, id: CustomTemplateId) -> None: + """ + Delete a Custom Template by ID. + + Parameters + ---------- + id : str + The Albert ID of the custom template to delete. + + Raises + ------ + AlbertHTTPError + If the API responds with a non-2xx status (e.g., 404 if not found). 
+ """ + url = f"{self.base_path}/{id}" + self.session.delete(url) diff --git a/src/albert/collections/data_templates.py b/src/albert/collections/data_templates.py index 75a1a9ee..fbb10f57 100644 --- a/src/albert/collections/data_templates.py +++ b/src/albert/collections/data_templates.py @@ -93,7 +93,9 @@ def _add_param_enums( data_template = self.get_by_id(id=data_template_id) existing_parameters = data_template.parameter_values - for parameter in new_parameters: + all_results: list[EnumValidationValue] = [] + + for index, parameter in enumerate(new_parameters, start=1): this_sequence = next( ( p.sequence @@ -102,6 +104,7 @@ def _add_param_enums( ), None, ) + rowId = f"ROW{index}" enum_patches = [] if ( parameter.validation @@ -170,10 +173,12 @@ def _add_param_enums( if len(enum_patches) > 0: enum_response = self.session.put( - f"{self.base_path}/{data_template_id}/parameters/{this_sequence}/enums", + f"{self.base_path}/{data_template_id}/parameters/{rowId}/enums", json=enum_patches, ) - return [EnumValidationValue(**x) for x in enum_response.json()] + all_results.extend([EnumValidationValue(**x) for x in enum_response.json()]) + + return all_results @validate_call def get_by_id(self, *, id: DataTemplateId) -> DataTemplate: diff --git a/src/albert/collections/notebooks.py b/src/albert/collections/notebooks.py index 2741cdba..6acae1c7 100644 --- a/src/albert/collections/notebooks.py +++ b/src/albert/collections/notebooks.py @@ -13,6 +13,7 @@ PutBlockPayload, PutOperation, ) +from albert.resources.notebooks import BlockType class NotebookCollection(BaseCollection): diff --git a/src/albert/collections/parameter_groups.py b/src/albert/collections/parameter_groups.py index 419fa3ed..7f109e1e 100644 --- a/src/albert/collections/parameter_groups.py +++ b/src/albert/collections/parameter_groups.py @@ -256,6 +256,41 @@ def update(self, *, parameter_group: ParameterGroup) -> ParameterGroup: url=enum_url, json=ep, ) + required_params: list[dict] = [] + for existing_param in 
existing.parameters: + # find the matching updated param by its row_id + updated_param = next( + ( + parameter + for parameter in parameter_group.parameters + if parameter.sequence == existing_param.sequence + ), + None, + ) + if not updated_param: + continue + + if existing_param.required != updated_param.required: + required_params = [ + { + "operation": "update", + "attribute": "required", + "rowId": existing_param.sequence, + "oldValue": existing_param.required, + "newValue": updated_param.required, + }] + + self.session.patch( + url=path, + json={"data": required_params}, + ) + + # if required_params: + # self.session.patch( + # url=path, + # json={"data": required_params}, + # ) + if len(general_patches.data) > 0: # patch the general patches self.session.patch( diff --git a/src/albert/collections/workflows.py b/src/albert/collections/workflows.py index 522972dc..60ec80b4 100644 --- a/src/albert/collections/workflows.py +++ b/src/albert/collections/workflows.py @@ -64,7 +64,10 @@ def create(self, *, workflows: list[Workflow]) -> list[Workflow]: for x in workflows ], ) - return [Workflow(**x) for x in response.json()] + try: + return [Workflow(**x) for x in response.json()] + except Exception: + return [Workflow(id=response.json()[0].get('existingAlbertId') or response.json()[0].get('albertId'),name=response.json()[0].get('name'),ParameterGroups=[])] def _hydrate_parameter_groups(self, *, workflow: Workflow) -> None: """Populate parameter setpoints when only an ID is provided.""" diff --git a/src/albert/resources/custom_templates.py b/src/albert/resources/custom_templates.py index 85b49603..43db6949 100644 --- a/src/albert/resources/custom_templates.py +++ b/src/albert/resources/custom_templates.py @@ -55,6 +55,7 @@ class GeneralData(BaseTaggedResource): priority: Priority | None = Field(default=None) sources: list[TaskSource] | None = Field(alias="Sources", default=None) parent_id: str | None = Field(alias="parentId", default=None) + notes: str | None = 
Field(default=None) class JobStatus(str, Enum): @@ -114,6 +115,8 @@ class BatchData(BaseTaggedResource): inventories: list[DataTemplateInventory] | None = Field(default=None, alias="Inventories") priority: Priority # enum?! workflow: list[EntityLink] = Field(default=None, alias="Workflow") + notes: str | None = Field(default=None) + due_date: str | None = Field(alias="dueDate", default=None) class PropertyData(BaseTaggedResource): @@ -126,6 +129,7 @@ class PropertyData(BaseTaggedResource): project: SerializeAsEntityLink[Project] | None = Field(alias="Project", default=None) inventories: list[DataTemplateInventory] | None = Field(default=None, alias="Inventories") due_date: str | None = Field(alias="dueDate", default=None) + notes: str | None = Field(default=None) class SheetData(BaseTaggedResource): @@ -136,6 +140,7 @@ class SheetData(BaseTaggedResource): class NotebookData(BaseTaggedResource): + id: str category: Literal[TemplateCategory.NOTEBOOK] = TemplateCategory.NOTEBOOK @@ -153,22 +158,26 @@ class ACLType(str, Enum): class TeamACL(ACL): - type: Literal[ACLType.TEAM] = ACLType.TEAM + # accept either backend token or SDK enum value + type: Literal[ACLType.TEAM, "CustomTemplateTeam"] = ACLType.TEAM class OwnerACL(ACL): - type: Literal[ACLType.OWNER] = ACLType.OWNER + type: Literal[ACLType.OWNER, "CustomTemplateOwner"] = ACLType.OWNER class MemberACL(ACL): - type: Literal[ACLType.MEMBER] = ACLType.MEMBER + type: Literal[ACLType.MEMBER, "CustomTemplateMember"] = ACLType.MEMBER class ViewerACL(ACL): - type: Literal[ACLType.VIEWER] = ACLType.VIEWER + type: Literal[ACLType.VIEWER, "CustomTemplateViewer"] = ACLType.VIEWER -ACLEntry = Annotated[TeamACL | OwnerACL | MemberACL | ViewerACL, Field(discriminator="type")] +ACLEntry = Annotated[ + TeamACL | OwnerACL | MemberACL | ViewerACL, + Field(discriminator="type"), +] class TemplateACL(BaseResource): @@ -198,7 +207,7 @@ class CustomTemplate(BaseTaggedResource): """ name: str - id: CustomTemplateId = 
Field(alias="albertId") + id: str | None = Field(default=None, alias="albertId") category: TemplateCategory = Field(default=TemplateCategory.GENERAL) metadata: dict[str, MetadataItem] | None = Field(default=None, alias="Metadata") data: CustomTemplateData | None = Field(default=None, alias="Data") diff --git a/src/albert/resources/data_templates.py b/src/albert/resources/data_templates.py index 024dca5c..1367ab82 100644 --- a/src/albert/resources/data_templates.py +++ b/src/albert/resources/data_templates.py @@ -53,7 +53,7 @@ class DataTemplate(BaseTaggedResource): description: str | None = None security_class: SecurityClass | None = None verified: bool = False - users_with_access: list[SerializeAsEntityLink[User]] | None = Field(alias="ACL", default=None) + acl: list[SerializeAsEntityLink[User]] | None = Field(alias="ACL", default=None) data_column_values: list[DataColumnValue] | None = Field(alias="DataColumns", default=None) parameter_values: list[ParameterValue] | None = Field(alias="Parameters", default=None) deleted_parameters: list[ParameterValue] | None = Field( diff --git a/src/albert/resources/notebooks.py b/src/albert/resources/notebooks.py index f2f135ff..5c31fe00 100644 --- a/src/albert/resources/notebooks.py +++ b/src/albert/resources/notebooks.py @@ -8,7 +8,7 @@ from pydantic import Field, model_validator from albert.core.base import BaseAlbertModel -from albert.core.shared.identifiers import LinkId, NotebookId, ProjectId, SynthesisId, TaskId +from albert.core.shared.identifiers import LinkId, NotebookId, ProjectId, SynthesisId, TaskId, CustomTemplateId from albert.core.shared.models.base import BaseResource, EntityLink from albert.exceptions import AlbertException from albert.resources.acls import ACL @@ -220,7 +220,7 @@ class NotebookLink(BaseAlbertModel): class Notebook(BaseResource): id: NotebookId | None = Field(default=None, alias="albertId") name: str = Field(default="Untitled Notebook") - parent_id: ProjectId | TaskId = Field(..., 
alias="parentId") + parent_id: ProjectId | TaskId | CustomTemplateId = Field(..., alias="parentId") version: datetime | None = Field(default=None) blocks: list[NotebookBlock] = Field(default_factory=list) links: list[NotebookLink] | None = Field(default=None) diff --git a/src/albert/resources/parameter_groups.py b/src/albert/resources/parameter_groups.py index d494db7b..8e8efce8 100644 --- a/src/albert/resources/parameter_groups.py +++ b/src/albert/resources/parameter_groups.py @@ -27,6 +27,7 @@ class DataType(str, Enum): NUMBER = "number" STRING = "string" ENUM = "enum" + IMAGE = "image" class Operator(str, Enum): @@ -99,6 +100,7 @@ class ParameterValue(BaseAlbertModel): unit: SerializeAsEntityLink[Unit] | None = Field(alias="Unit", default=None) added: AuditFields | None = Field(alias="Added", default=None, exclude=True) validation: list[ValueValidation] | None = Field(default_factory=list) + required: bool | None = Field(default=False) # Read-only fields name: str | None = Field(default=None, exclude=True, frozen=True) diff --git a/src/albert/resources/sheets.py b/src/albert/resources/sheets.py index 227e42c2..9ced9e38 100644 --- a/src/albert/resources/sheets.py +++ b/src/albert/resources/sheets.py @@ -1,6 +1,6 @@ from enum import Enum -from typing import Any, ForwardRef, Union - +from typing import Any, ForwardRef, Union, Optional +from pydantic.config import ConfigDict import pandas as pd from pydantic import Field, PrivateAttr, field_validator, model_validator, validate_call @@ -148,6 +148,66 @@ class DesignState(BaseResource): collapsed: bool | None = False +class Group(BaseAlbertModel): + row_id: str + name: str | None = None + child_row_ids: list[str] = Field(default_factory=list) + +def _groups_from_sequence(seq: list[dict]) -> list[Group]: + """ + Try to infer groups from GET /worksheet/design/{id}/rows/sequence. + We accept multiple shapes: look for parent-like keys and child arrays. + Returns [] if we can't infer anything. 
+ """ + if not isinstance(seq, list): + return [] + + # Build lookup + by_row: dict[str, dict] = {} + for item in seq: + rid = item.get("rowId") or item.get("id") + if rid: + by_row[rid] = item + + # Accept several possible keys + def children_of(item: dict) -> list[str]: + for k in ("children", "childRows", "ChildRows", "rows"): + v = item.get(k) + if isinstance(v, list): + if v and isinstance(v[0], dict): + return [x.get("rowId") or x.get("id") for x in v if (x.get("rowId") or x.get("id"))] + return [str(x) for x in v] + return [] + + def parent_of(item: dict) -> str | None: + for k in ("parentId", "groupId", "parentRowId"): + if item.get(k): + return item[k] + return None + + # 1) Prefer explicit parent→children + groups: list[Group] = [] + for item in seq: + rid = item.get("rowId") or item.get("id") + if not rid: + continue + kids = children_of(item) + if kids: + groups.append(Group(row_id=rid, name=item.get("name"), child_row_ids=[k for k in kids if k])) + + if groups: + return groups + + # 2) If only child→parent exists, invert it + parent_map: dict[str, list[str]] = {} + for item in seq: + rid = item.get("rowId") or item.get("id") + pid = parent_of(item) + if rid and pid: + parent_map.setdefault(pid, []).append(rid) + + return [Group(row_id=pid, name=by_row.get(pid, {}).get("name"), child_row_ids=kids) + for pid, kids in parent_map.items()] class Design(BaseSessionResource): """A Design in a Sheet. Designs are sheet subsections that are largly abstracted away from the user. 
@@ -176,6 +236,7 @@ class Design(BaseSessionResource): _columns: list["Column"] | None = PrivateAttr(default=None) _sheet: Union["Sheet", None] = PrivateAttr(default=None) # noqa _leftmost_pinned_column: str | None = PrivateAttr(default=None) + _groups_cache: list[Group] | None = PrivateAttr(default=None) def _grid_to_cell_df(self, *, grid_response): items = grid_response.get("Items") or [] @@ -185,6 +246,16 @@ def _grid_to_cell_df(self, *, grid_response): records: list[dict[str, Cell]] = [] index: list[str] = [] for item in items: + + def _normalize_value(v): + if v is None: + return "" + if isinstance(v, list): + return "" if not v else (v[0] if isinstance(v[0], str) else str(v[0])) + if isinstance(v, str | dict): + return v + return str(v) + this_row_id = item["rowId"] this_index = item["rowUniqueId"] row_label = item.get("lableName") or item.get("name") @@ -209,7 +280,10 @@ def _grid_to_cell_df(self, *, grid_response): raw_id = c.pop("id", None) inv = (raw_id if raw_id.startswith("INV") else f"INV{raw_id}") if raw_id else None c["inventory_id"] = inv - + c["value"] = _normalize_value(c.get("value", "")) + fmt = c.get("cellFormat") + if fmt is None or isinstance(fmt, list): + c["cellFormat"] = {} cell = Cell(**c) col_id = c["colId"] @@ -313,6 +387,85 @@ def _get_columns(self, *, grid_response: dict) -> list["Column"]: return cols + # def _get_rows(self, *, grid_response: dict) -> list["Row"]: + # """ + # Parse the /grid response into a list of Row models. + + # Parameters + # ---------- + # grid_response : dict + # The JSON-decoded payload from GET /worksheet/.../grid. 
+ + # Returns + # ------- + # list[Row] + # One Row per item in `Items` + # """ + # items = grid_response.get("Items") or [] + # if not items: + # return [] + + # rows: list[Row] = [] + # for v in items: + # raw_id = v.get("id") + # if raw_id and not str(raw_id).startswith("INV"): + # raw_id = f"INV{raw_id}" + # inv_id = raw_id + + # row_label = v.get("lableName") or v.get("name") + + + # rows.append( + # Row( + # rowId=v["rowId"], + # type=v["type"], + # session=self.session, + # design=self, + # sheet=self.sheet, + # name=row_label, + # manufacturer=v.get("manufacturer"), + # inventory_id=inv_id, + # config=v.get("config"), + # ) + # ) + # seq = grid_response.get("RowSequence") or [] + # if seq: + # groups_inline = _groups_from_sequence(seq) + # if groups_inline: + # by_id = {r.row_id: r for r in rows} + # # children → parent + # for g in groups_inline: + # for cid in g.child_row_ids: + # ch = by_id.get(cid) + # if ch: + # ch.parent_row_id = g.row_id + # # header rows (if present) + # for g in groups_inline: + # if g.row_id in by_id: + # by_id[g.row_id].child_row_ids = list(g.child_row_ids) + # # warm the cache so later calls are cheap + # self._groups_cache = groups_inline + # return rows # we’re done + # try: + # groups = self.list_groups() + # except Exception: + # groups = [] + + # if groups: + # by_id = {r.row_id: r for r in rows} + # # Always mark children → parent + # for g in groups: + # for cid in g.child_row_ids: + # ch = by_id.get(cid) + # if ch: + # ch.parent_row_id = g.row_id + # # Mark headers if present in grid + # for g in groups: + # if g.row_id in by_id: + # by_id[g.row_id].child_row_ids = list(g.child_row_ids) + + # return rows + def _get_rows(self, *, grid_response: dict) -> list["Row"]: """ Parse the /grid response into a list of Row models. 
@@ -337,7 +490,6 @@ def _get_rows(self, *, grid_response: dict) -> list["Row"]: if raw_id and not str(raw_id).startswith("INV"): raw_id = f"INV{raw_id}" inv_id = raw_id - row_label = v.get("lableName") or v.get("name") rows.append( @@ -350,11 +502,50 @@ def _get_rows(self, *, grid_response: dict) -> list["Row"]: name=row_label, manufacturer=v.get("manufacturer"), inventory_id=inv_id, + config=v.get("config"), ) ) - return rows + by_id = {r.row_id: r for r in rows} + # Extract parent-child from rowHierarchy + for v in items: + hierarchy = v.get("rowHierarchy", []) + if len(hierarchy) < 2: + continue + row_ids = [h for h in hierarchy if h != self.design_type.value] + if len(row_ids) > 1: + by_id[v["rowId"]].parent_row_id = row_ids[-2] + + # Build child_row_ids + for row in rows: + if row.parent_row_id and row.parent_row_id in by_id: + parent = by_id[row.parent_row_id] + if row.row_id not in parent.child_row_ids: + parent.child_row_ids.append(row.row_id) + + seq = grid_response.get("RowSequence") or [] + if seq: + groups_inline = _groups_from_sequence(seq) + if groups_inline: + for g in groups_inline: + for cid in g.child_row_ids: + ch = by_id.get(cid) + if ch and not ch.parent_row_id: + ch.parent_row_id = g.row_id + for g in groups_inline: + if g.row_id in by_id: + existing = set(by_id[g.row_id].child_row_ids) + by_id[g.row_id].child_row_ids = list(existing | set(g.child_row_ids)) + self._groups_cache = groups_inline + return rows + + groups = [Group(row_id=r.row_id, name=r.name, child_row_ids=r.child_row_ids) + for r in rows if r.child_row_ids] + if groups: + self._groups_cache = groups + + return rows def _get_grid(self): endpoint = f"/api/v3/worksheet/{self.id}/{self.design_type.value}/grid" response = self.session.get(endpoint) @@ -363,6 +554,97 @@ def _get_grid(self): self._columns = self._get_columns(grid_response=resp_json) self._rows = self._get_rows(grid_response=resp_json) return self._grid_to_cell_df(grid_response=resp_json) + def 
_hydrate_groups_from_sequence(self) -> list[Group]: + r = self.session.get(f"/api/v3/worksheet/design/{self.id}/rows/sequence") + if r.status_code >= 400: + return [] + seq = r.json() + groups = _groups_from_sequence(seq) + self._groups_cache = groups + return groups + + def list_groups(self, *, refresh: bool = False) -> list[Group]: + if self._groups_cache is not None and not refresh: + return self._groups_cache + + # No GET /groups exists → try sequence as a fallback + groups = self._hydrate_groups_from_sequence() + self._groups_cache = groups or [] # [] = unknown + return self._groups_cache + + def _clear_layout_caches(self): + if self.sheet is not None: + self.sheet.grid = None + self._rows = None + self._columns = None + def group_rows( + self, + *, + name: str, + child_row_ids: list[str] | list["Row"], + reference_id: str | None = None, + position: str = "above", + ) -> dict: + """ + Create a row group within this design. + + Contract enforced here: + - referenceId MUST be one of ChildRows and MUST be the first item. + - If caller gives a reference_id not in children, we ignore it and use children[0]. 
+ """ + + # Accept Row objects or plain rowId strings + ids: list[str] = [ + r.row_id if hasattr(r, "row_id") else str(r) # type: ignore[attr-defined] + for r in child_row_ids + ] + if not ids: + raise AlbertException("child_row_ids must include at least one rowId") + + # Deduplicate while preserving order + seen = set() + ids = [x for x in ids if not (x in seen or seen.add(x))] + + # Enforce API requirement: + # - if reference_id provided and in ids -> move it to front + # - else -> reference_id = ids[0] + if reference_id and reference_id in ids: + ids = [reference_id] + [x for x in ids if x != reference_id] + else: + reference_id = ids[0] + + endpoint = f"/api/v3/worksheet/{self.id}/designs/groups" + payload = { + "name": name, + "referenceId": reference_id, # now guaranteed to be ids[0] + "position": position, + "ChildRows": [{"rowId": rid} for rid in ids], # referenceId is first + } + + resp = self.session.put(endpoint, json=payload) + if resp.status_code >= 400: + alt = f"/api/v3/worksheet/design/{self.id}/groups" + resp = self.session.put(alt, json=payload) + if resp.status_code >= 400: + raise AlbertException(f"Grouping failed: {resp.status_code} {getattr(resp, 'text', '')}") + + data = resp.json() if hasattr(resp, "json") else {} + + # Best-effort cache update + try: + group = Group( + row_id=data.get("rowId"), + name=data.get("name"), + child_row_ids=[d.get("rowId") for d in (data.get("ChildRows") or []) if d.get("rowId")], + ) + existing = {g.row_id: g for g in (self._groups_cache or [])} + existing[group.row_id] = group + self._groups_cache = list(existing.values()) + except Exception: + self._groups_cache = None + + self._clear_layout_caches() + return data @property def columns(self) -> list["Column"]: @@ -372,8 +654,13 @@ def columns(self) -> list["Column"]: @property def rows(self) -> list["Row"]: + if self.design_type == 'process': + return [] if not self._rows: - self._get_grid() + try: + self._get_grid() + except Exception as e: + print(e) return 
self._rows @@ -409,10 +696,10 @@ class Sheet(BaseSessionResource): # noqa:F811 """ - id: str = Field(alias="albertId") + id: str | None = Field(default=None, alias="albertId") name: str formulations: list[SheetFormulationRef] = Field(default_factory=list, alias="Formulas") - hidden: bool + hidden: bool = Field(default=False) _app_design: Design = PrivateAttr(default=None) _product_design: Design = PrivateAttr(default=None) _result_design: Design = PrivateAttr(default=None) @@ -601,7 +888,68 @@ def add_formulation( self.update_cells(cells=all_cells) return self.get_column(column_id=col_id) + def append_components_to_formulation( + self, + *, + formulation_name: str | None = None, + column_id: str | None = None, + inventory_id: InventoryId | None = None, + components: list[Component], + enforce_order: bool = False, + ) -> Column: + """ + Append (or upsert) components into an existing formulation column + without clearing other cells. + + You must specify exactly one of: column_id, inventory_id, or formulation_name. 
+ """ + # 1) Resolve target column + col = self.get_column( + column_id=column_id, inventory_id=inventory_id, column_name=formulation_name + ) + col_id = col.column_id + + # 2) Build Cell updates for just the given components + all_cells: list[Cell] = [] + self.grid = None # refresh caches + + for component in components: + row_id = self._get_row_id_for_component( + inventory_item=component.inventory_item, + existing_cells=all_cells, + enforce_order=enforce_order, + ) + if row_id is None: + raise AlbertException(f"no component with id {component.inventory_item.id}") + + value = str(component.amount) + min_value = str(component.min_value) if component.min_value is not None else None + max_value = str(component.max_value) if component.max_value is not None else None + + this_cell = Cell( + column_id=col_id, + row_id=row_id, + value=value, + calculation="", + type=CellType.INVENTORY, + design_id=self.product_design.id, + name=col.name or formulation_name or "", + inventory_id=col.inventory_id, + min_value=min_value, + max_value=max_value, + ) + all_cells.append(this_cell) + + # 3) Upsert only these cells + self.update_cells(cells=all_cells) + return self.get_column(column_id=col_id) + def _get_row_id_for_component(self, *, inventory_item, existing_cells, enforce_order): + # Checks if that inventory row already exists + sheet_inv_id = inventory_item.id + for r in self.product_design.rows: + if r.inventory_id == sheet_inv_id: + return r.row_id self.grid = None # within a sheet, the "INV" prefix is dropped @@ -747,6 +1095,81 @@ def add_inventory_row( id=row_dict["id"], manufacturer=row_dict["manufacturer"], ) + def add_lookup_row( + self, + *, + name: str, + row_name: str | None = None, + design: DesignType | str | None = DesignType.APPS, + position: dict | None = None, + ) -> Row: + if design == DesignType.RESULTS: + raise AlbertException("You cannot add rows to the results design") + position = position or {"reference_id": "ROW1", "position": "above"} + design_id = 
self._get_design_id(design=design) + endpoint = f"/api/v3/worksheet/design/{design_id}/rows" + payload = [{ + "type": "LKP", + "name": name, + "referenceId": position["reference_id"], + "position": position["position"], + "labelName": "" if row_name is None else row_name, + }] + resp = self.session.post(endpoint, json=payload) + self.grid = None + data = resp.json()[0] if isinstance(resp.json(), list) else resp.json() + return Row( + rowId=data["rowId"], + type=data["type"], + session=self.session, + design=self._get_design(design=design), + sheet=self, + name=data.get("name") or data.get("lableName") or row_name or name, + inventory_id=data.get("id"), + manufacturer=data.get("manufacturer"), + ) + def add_app_row( + self, + *, + app_id: str, + name: str, + config: dict[str, str] | tuple[str, str] | None = None, + design: DesignType | str | None = DesignType.APPS, + position: dict | None = None, + ) -> Row: + if design == DesignType.RESULTS: + raise AlbertException("You cannot add rows to the results design") + + position = position or {"reference_id": "ROW1", "position": "above"} + design_id = self._get_design_id(design=design) + endpoint = f"/api/v3/worksheet/design/{design_id}/rows" + + app_id = app_id if app_id.startswith("APP") else f"APP{app_id}" + + payload = [{ + "type": "APP", + "referenceId": position["reference_id"], + "id": app_id, + "position": position["position"], + "name": name, + "config": config + }] + + resp = self.session.post(endpoint, json=payload) + self.grid = None + + data = resp.json()[0] if isinstance(resp.json(), list) else resp.json() + return Row( + rowId=data["rowId"], + type=data["type"], + session=self.session, + design=self._get_design(design=design), + sheet=self, + name=data.get("name") or name, + inventory_id=data.get("id"), + manufacturer=data.get("manufacturer"), + config=(data.get("config") or cfg), + ) def _filter_cells(self, *, cells: list[Cell], response_dict: dict): updated = [] @@ -918,10 +1341,13 @@ def 
update_cells(self, *, cells: list[Cell]): this_url = f"/api/v3/worksheet/{design_id}/values" for payload in payloads: - response = self.session.patch( + try: + response = self.session.patch( this_url, json=[payload], # The API expects a list of changes ) + except Exception as e: + continue original_cell = next( ( @@ -950,26 +1376,97 @@ def update_cells(self, *, cells: list[Cell]): self.grid = None return (updated, failed) - def add_blank_column(self, *, name: str, position: dict = None): + + def add_blank_column(self, col_or_name=None, *, position=None): + # accept Column, dict, or string + if isinstance(col_or_name, Column): + nm = col_or_name.name or "" + elif isinstance(col_or_name, dict): + nm = col_or_name.get("name", "") + position = position or col_or_name.get("position") + else: + nm = str(col_or_name or "") + extra = {} + if position is None: - position = {"reference_id": self.leftmost_pinned_column, "position": "rightOf"} - endpoint = f"/api/v3/worksheet/sheet/{self.id}/columns" - payload = [ - { - "type": "BLK", - "name": name, - "referenceId": position["reference_id"], - "position": position["position"], - } - ] + ref = self.columns[-1].column_id if getattr(self, "columns", None) else self.leftmost_pinned_column + position = {"reference_id": ref, "position": "rightOf"} - response = self.session.post(endpoint, json=payload) + payload = [{ + "type": "BLK", + "name": nm, + "referenceId": position["reference_id"], + "position": position["position"], + }] + resp = self.session.post(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + data = resp.json()[0]; data["sheet"] = self; data["session"] = self.session + self.grid = None + return Column(**data) + + + def add_lookup_column(self, col_or_name=None, *, position=None): + if isinstance(col_or_name, Column): + nm = col_or_name.name or "" + elif isinstance(col_or_name, dict): + nm = col_or_name.get("name", "") + else: + nm = str(col_or_name or "") + extra = {} - data = response.json() - 
data[0]["sheet"] = self - data[0]["session"] = self.session - self.grid = None # reset the known grid. We could probably make this nicer later. - return Column(**data[0]) + if position is None: + ref = self.columns[-1].column_id if getattr(self, "columns", None) else self.leftmost_pinned_column + position = {"reference_id": ref, "position": "rightOf"} + + payload = [{ + "type": "LKP", + "name": nm, + "referenceId": position["reference_id"], + "position": position["position"], + }] + resp = self.session.post(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + if resp.status_code >= 400: + payload[0]["type"] = "BLK" + resp = self.session.post(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + + data = resp.json()[0]; data["sheet"] = self; data["session"] = self.session + self.grid = None + return Column(**data) + + def set_columns_pinned(self, *, col_ids: list[str], pinned: str | None) -> None: + """Pin columns: pinned in {'left','right',None}.""" + payload = {"data": [{ + "operation": "update", + "attribute": "pinned", + "colIds": col_ids, + "newValue": pinned, + }]} + self.session.patch(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + self.grid = None + + def set_columns_width(self, *, col_ids: list[str], width: str) -> None: + """Set width like '142px' for one or many columns.""" + payload = {"data": [{ + "operation": "update", + "attribute": "columnWidth", + "colIds": col_ids, + "newValue": width, + }]} + self.session.patch(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + self.grid = None + + def set_column_hidden(self, *, col_id: str, hidden: bool, old_value: bool | None = None) -> None: + """Hide/show a single column. 
If you know the previous value, pass it.""" + data = { + "operation": "update", + "attribute": "hidden", + "colId": col_id, + "newValue": hidden, + } + if old_value is not None: + data["oldValue"] = old_value + payload = {"data": [data]} + self.session.patch(f"/api/v3/worksheet/sheet/{self.id}/columns", json=payload) + self.grid = None def delete_column(self, *, column_id: str) -> None: endpoint = f"/api/v3/worksheet/sheet/{self.id}/columns" @@ -1193,6 +1690,11 @@ def recolor_cells(self, color: CellColor): new_cells.append(cell_copy) return self.sheet.update_cells(cells=new_cells) +class Config(BaseSessionResource): + key: str + value: str + model_config = ConfigDict(extra='ignore') + class Row(BaseSessionResource): # noqa:F811 """A row in a Sheet @@ -1227,6 +1729,9 @@ class Row(BaseSessionResource): # noqa:F811 name: str | None = Field(default=None) inventory_id: str | None = Field(default=None, alias="id") manufacturer: str | None = Field(default=None) + config: Config | None = Field(default=None) + parent_row_id: str | None = Field(default=None) + child_row_ids: list[str] = Field(default_factory=list) @property def row_unique_id(self): @@ -1235,6 +1740,20 @@ def row_unique_id(self): @property def cells(self) -> list[Cell]: return self.sheet.grid.loc[self.row_unique_id] + + @property + def is_group_header(self) -> bool: + return bool(self.child_row_ids) + @field_validator("config", mode="before") + @classmethod + def _coerce_config(cls, v): + if v is None or isinstance(v, Config): + return v + if isinstance(v, dict): + return Config(**v) + if isinstance(v, (tuple, list)) and len(v) == 2: + return Config(key=v[0], value=v[1]) + return v def recolor_cells(self, color: CellColor): new_cells = [] diff --git a/src/albert/resources/tagged_base.py b/src/albert/resources/tagged_base.py index 2a0a5364..0d5c9402 100644 --- a/src/albert/resources/tagged_base.py +++ b/src/albert/resources/tagged_base.py @@ -28,6 +28,8 @@ def convert_tags(cls, data: dict[str, Any]) -> 
dict[str, Any]: tags = data.get("tags") if not tags: tags = data.get("Tags") + if not tags and "Data" in data: + tags = data["Data"].get("tags") or data["Data"].get("Tags") if tags: new_tags = [] for t in tags: diff --git a/src/albert/utils/_patch.py b/src/albert/utils/_patch.py index 6dcfd093..a742789b 100644 --- a/src/albert/utils/_patch.py +++ b/src/albert/utils/_patch.py @@ -357,6 +357,7 @@ def generate_enum_patches( enums_in_both = [x for x in updated_enums if x.id is not None and x.id in existing_ids] if existing_enums == updated_enums: return [] + enum_patches = [] existing_enums_values = [x for x in existing_enums if isinstance(x, EnumValidationValue)] @@ -432,6 +433,7 @@ def generate_parameter_patches( unit_patch = _parameter_unit_patches(existing_param, updated_param) value_patch = _parameter_value_patches(existing_param, updated_param) validation_patch = parameter_validation_patch(existing_param, updated_param) + required_patch = _parameter_required_patches(existing_param, updated_param) if unit_patch: parameter_patches.append(unit_patch) @@ -439,6 +441,8 @@ def generate_parameter_patches( parameter_patches.append(value_patch) if validation_patch: parameter_patches.append(validation_patch) + if required_patch: + parameter_patches.append(required_patch) if ( updated_param.validation is not None and updated_param.validation != [] @@ -548,3 +552,37 @@ def generate_parameter_group_patches( general_patches.data.extend(tag_patches) return general_patches, new_parameters, parameter_enum_patches + + +def _parameter_required_patches( + initial_parameter_value: ParameterValue, updated_parameter_value: ParameterValue +) -> PGPatchDatum | None: + """Generate a Patch for a parameter's `required` flag.""" + + if initial_parameter_value.required == updated_parameter_value.required: + return None + elif initial_parameter_value.required is None: + if updated_parameter_value.required is not None: + return PGPatchDatum( + operation="add", + attribute="required", + 
newValue=updated_parameter_value.required, + rowId=updated_parameter_value.sequence, + ) + elif updated_parameter_value.required is None: + if initial_parameter_value.required is not None: + return PGPatchDatum( + operation="delete", + attribute="required", + oldValue=initial_parameter_value.required, + rowId=updated_parameter_value.sequence, + ) + elif initial_parameter_value.required != updated_parameter_value.required: + return PGPatchDatum( + operation="update", + attribute="required", + oldValue=initial_parameter_value.required, + newValue=updated_parameter_value.required, + rowId=updated_parameter_value.sequence, + ) + return None diff --git a/tests/collections/test_custom_templates.py b/tests/collections/test_custom_templates.py index 608f0098..563487b4 100644 --- a/tests/collections/test_custom_templates.py +++ b/tests/collections/test_custom_templates.py @@ -58,3 +58,31 @@ def test_hydrate_custom_template(client: Albert): # identity checks assert hydrated.id == custom_template.id assert hydrated.name == custom_template.name + + +def test_create_custom_template_from_seed( + caplog, + client: Albert, + seed_prefix: str, + seeded_custom_templates: list[CustomTemplate], +): + """Test creating a new custom template.""" + seed = seeded_custom_templates[0] + + new_template = CustomTemplate( + name=seed_prefix, + category=seed.category, + data=( + seed.data.model_copy(update={"name": seed_prefix}, deep=True) + if getattr(seed, "data", None) is not None + else None + ), + ) + + created = client.custom_templates.create(custom_template=new_template) + + assert isinstance(created, CustomTemplate) + assert created.name == new_template.name + assert created.category == new_template.category + if new_template.data is not None and hasattr(new_template.data, "name"): + assert getattr(created.data, "name", None) == new_template.data.name diff --git a/tests/conftest.py b/tests/conftest.py index 15fecbe1..a4cea322 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ 
-14,6 +14,7 @@ from albert.resources.cas import Cas from albert.resources.companies import Company from albert.resources.custom_fields import CustomField +from albert.resources.custom_templates import CustomTemplate, GeneralData, TemplateCategory from albert.resources.data_columns import DataColumn from albert.resources.data_templates import DataTemplate from albert.resources.files import FileCategory, FileInfo, FileNamespace @@ -223,6 +224,23 @@ def seeded_locations(client: Albert, seed_prefix: str) -> Iterator[list[Location client.locations.delete(id=location.id) +@pytest.fixture(scope="session") +def seeded_custom_templates(client: Albert, seed_prefix: str): + seeded = [] + data = GeneralData(name=f"{seed_prefix}-general") + custom_template = CustomTemplate( + name=f"{seed_prefix}-general", data=data, category=TemplateCategory.GENERAL + ) + created = client.custom_templates.create(custom_template=custom_template) + seeded.append(created) + + yield seeded + + for t in seeded: + with suppress(NotFoundError): + client.custom_templates.delete(id=t.id) + + @pytest.fixture(scope="session") def seeded_projects( client: Albert,