From 76d76315a23607f97f60f8e8aab5e738505e35f7 Mon Sep 17 00:00:00 2001
From: Rohit Sankaran
Date: Mon, 5 May 2025 13:15:29 +0200
Subject: [PATCH 01/25] chore: Add quotes around installable optional features (#41)

---
 README.md              | 6 +++---
 src/tower/__init__.py  | 6 +++---
 src/tower/_features.py | 8 ++++----
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index 2267aa28..9b236a83 100644
--- a/README.md
+++ b/README.md
@@ -51,7 +51,7 @@ Tower supports several optional features that can be installed as needed:
 #### AI/LLM Support
 
 ```bash
-pip install tower[ai]
+pip install "tower[ai]"
 ```
 
 Provides integration with language models through:
@@ -61,7 +61,7 @@ Provides integration with language models through:
 #### Apache Iceberg Support
 
 ```bash
-pip install tower[iceberg]
+pip install "tower[iceberg]"
 ```
 
 Provides Apache Iceberg table support:
@@ -72,7 +72,7 @@ Provides Apache Iceberg table support:
 #### Install All Optional Features
 
 ```bash
-pip install tower[all]
+pip install "tower[all]"
 ```
 
 #### Check Available Features
diff --git a/src/tower/__init__.py b/src/tower/__init__.py
index 4511111d..129529ba 100644
--- a/src/tower/__init__.py
+++ b/src/tower/__init__.py
@@ -5,9 +5,9 @@
 including running apps locally or in the Tower cloud.
 
 Optional features:
-- AI/LLM support: Install with `pip install tower[ai]`
-- Apache Iceberg support: Install with `pip install tower[iceberg]`
-- All features: Install with `pip install tower[all]`
+- AI/LLM support: Install with `pip install "tower[ai]"`
+- Apache Iceberg support: Install with `pip install "tower[iceberg]"`
+- All features: Install with `pip install "tower[all]"`
 """
 
 from ._client import (
diff --git a/src/tower/_features.py b/src/tower/_features.py
index 5b257499..f2d8a0e7 100644
--- a/src/tower/_features.py
+++ b/src/tower/_features.py
@@ -4,9 +4,9 @@
 Tower supports optional features that require additional dependencies.
 These can be installed using pip:
 
-    pip install tower[ai]       # For AI/LLM features
-    pip install tower[iceberg]  # For Apache Iceberg table support
-    pip install tower[all]      # For all optional features
+    pip install "tower[ai]"       # For AI/LLM features
+    pip install "tower[iceberg]"  # For Apache Iceberg table support
+    pip install "tower[all]"      # For all optional features
 """
 
 import importlib.util
@@ -121,7 +121,7 @@ def override_get_attr(name: str) -> Any:
         raise ImportError(
             f"This requires the '{feature}' feature and the following dependencies: {deps}.\n"
             f"You are missing these packages: {missing}\n"
-            f"Install with: pip install tower[{feature}]"
+            f'Install with: pip install "tower[{feature}]"'
         )
 
     # Check if module is already cached

From ee58e60ede5a2d1eb531e47380773b27b13e1703 Mon Sep 17 00:00:00 2001
From: Brad Heller
Date: Mon, 5 May 2025 17:17:48 +0200
Subject: [PATCH 02/25] chore: Add new methods to the Table function

---
 src/tower/_tables.py       | 115 ++++++++++++++++++++++++++++++++++++-
 src/tower/utils/pyarrow.py | 106 ++++++++++++++++++++++++++++++++++
 2 files changed, 218 insertions(+), 3 deletions(-)

diff --git a/src/tower/_tables.py b/src/tower/_tables.py
index c68b98bb..bfe653db 100644
--- a/src/tower/_tables.py
+++ b/src/tower/_tables.py
@@ -1,18 +1,31 @@
-from typing import Optional
+from typing import Optional, Generic, TypeVar, Union, List
+from dataclasses import dataclass
+
+TTable = TypeVar("TTable", bound="Table")
 
 import polars as pl
 import pyarrow as pa
+import pyarrow.compute as pc
 
 from pyiceberg.catalog import load_catalog
 from pyiceberg.table import Table as IcebergTable
 
 from ._context import TowerContext
-from .utils.pyarrow import convert_pyarrow_schema
+from .utils.pyarrow import (
+    convert_pyarrow_schema,
+    convert_pyarrow_expressions,
+)
 from .utils.tables import (
     make_table_name,
     namespace_or_default,
 )
 
+@dataclass
+class RowsAffectedInformation:
+    inserts: int
+    updates: int
+
+
 class Table:
     """
     `Table` is a wrapper around an Iceberg table. It provides methods to read and
@@ -20,9 +33,11 @@ class Table:
     """
 
     def __init__(self, context: TowerContext, table: IcebergTable):
+        self._stats = RowsAffectedInformation(0, 0)
         self._context = context
         self._table = table
 
+
     def read(self) -> pl.DataFrame:
         """
         Reads from the Iceberg tables. Returns the results as a Polars DataFrame.
@@ -31,14 +46,104 @@ def read(self) -> pl.DataFrame:
         # the result as a DataFrame.
         return pl.scan_iceberg(self._table).collect()
 
-    def insert(self, data: pa.Table):
+
+    def to_polars(self) -> pl.LazyFrame:
+        """
+        Converts the table to a Polars LazyFrame. This is useful when you
+        understand Polars and you want to do something more complicated.
+        """
+        return pl.scan_iceberg(self._table).collect()
+
+
+    def rows_affected(self) -> RowsAffectedInformation:
+        """
+        Returns the stats for the table. This includes the number of inserts
+        and updates.
+        """
+        return self._stats
+
+
+    def insert(self, data: pa.Table) -> TTable:
         """
         Inserts data into the Iceberg table. The data is expressed as a PyArrow table.
 
         Args:
             data (pa.Table): The data to insert into the table.
+
+        Returns:
+            TTable: The table with the inserted rows.
         """
         self._table.append(data)
+        self._stats.inserts += data.num_rows()
+        return self
+
+
+    def upsert(self, data: pa.Table, join_cols: Optional[list[str]] = None) -> TTable:
+        """
+        Upserts data into the Iceberg table. The data is expressed as a PyArrow table.
+
+        Args:
+            data (pa.Table): The data to upsert into the table.
+ join_cols (Optional[list[str]]): The columns that form the key to match rows on + + Returns: + TTable: The table with the upserted rows. + """ + res = self._table.upsert( + data, + join_cols=join_cols, + + # All upserts will always be case sensitive. Perhaps we'll add this + # as a parameter in the future? + case_sensitive=True, + + # These are the defaults, but we're including them to be complete. + when_matched_update_all=True, + when_not_matched_insert_all=True, + ) + + # Update the stats with the results of the relevant upsert. + self._stats.updates += res.rows_updated + self._stats.inserts += res.rows_inserted + + return self + + + def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: + """ + Deletes data from the Iceberg table. The filters are expressed as a + PyArrow expression. The filters are applied to the table and the + matching rows are deleted. + + Args: + filters (Union[str, List[pc.Expression]]): The filters to apply to the table. + This can be a string or a list of PyArrow expressions. + + Returns: + TTable: The table with the deleted rows. + """ + if isinstance(filters, list): + # We need to covnert the pc.Expression into PyIceberg + next_filters = convert_pyarrow_expression(filters) + filters = next_filters + + self._table.delete( + delete_filters=filters, + + # We want this to always be the case. Not sure why you wouldn't? + case_sensitive=True, + ) + + # NOTE: There is, unfortunately, no way to get the number of rows + # deleted besides comparing the two snapshots that were created. + + return self + + def schema(self) -> pa.Schema: + # We take an Iceberg Schema and we need to convert it into a PyArrow Schema + iceberg_schema = self._table.schema() + return convert_iceberg_schema(iceberg_schema) + class TableReference: def __init__(self, ctx: TowerContext, catalog_name: str, name: str, namespace: Optional[str] = None): @@ -47,12 +152,14 @@ def __init__(self, ctx: TowerContext, catalog_name: str, name: str, namespace: O self._name = name self._namespace = namespace + def load(self) -> Table: namespace = namespace_or_default(self._namespace) table_name = make_table_name(self._name, namespace) table = self._catalog.load_table(table_name) return Table(self._context, table) + def create(self, schema: pa.Schema) -> Table: namespace = namespace_or_default(self._namespace) table_name = make_table_name(self._name, namespace) @@ -71,6 +178,7 @@ def create(self, schema: pa.Schema) -> Table: return Table(self._context, table) + def create_if_not_exists(self, schema: pa.Schema) -> Table: namespace = namespace_or_default(self._namespace) table_name = make_table_name(self._name, namespace) @@ -90,6 +198,7 @@ def create_if_not_exists(self, schema: pa.Schema) -> Table: return Table(self._context, table) + def tables( name: str, catalog: str = "default", diff --git a/src/tower/utils/pyarrow.py b/src/tower/utils/pyarrow.py index 32766b47..6976c794 100644 --- a/src/tower/utils/pyarrow.py +++ b/src/tower/utils/pyarrow.py @@ -1,6 +1,18 @@ +from typing import Optional, List + import pyarrow as pa +import pyarrow.compute as pc + import pyiceberg.types as types from pyiceberg.schema import Schema as IcebergSchema +from pyiceberg.expressions import ( + BooleanExpression, + And, Or, Not, + EqualTo, NotEqualTo, + GreaterThan, GreaterThanOrEqual, + LessThan, LessThanOrEqual, + Literal, Reference +) def arrow_to_iceberg_type(arrow_type): """ @@ -53,6 +65,52 @@ def arrow_to_iceberg_type(arrow_type): else: raise ValueError(f"Unsupported Arrow type: {arrow_type}") + +def 
iceberg_to_arrow_type(iceberg_type): + """ + Convert a PyIceberg type to a PyArrow type. + """ + if isinstance(iceberg_type, types.BooleanType): + return pa.bool_() + elif isinstance(iceberg_type, types.IntegerType): + return pa.int32() + elif isinstance(iceberg_type, types.LongType): + return pa.int64() + elif isinstance(iceberg_type, types.FloatType): + return pa.float32() + elif isinstance(iceberg_type, types.DoubleType): + return pa.float64() + elif isinstance(iceberg_type, types.StringType): + return pa.string() + elif isinstance(iceberg_type, types.BinaryType): + return pa.binary() + elif isinstance(iceberg_type, types.DateType): + return pa.date32() + elif isinstance(iceberg_type, types.TimestampType): + # Using microsecond precision as default + return pa.timestamp('us') + elif isinstance(iceberg_type, types.TimeType): + # Using microsecond precision as default + return pa.time64('us') + elif isinstance(iceberg_type, types.DecimalType): + return pa.decimal128(iceberg_type.precision, iceberg_type.scale) + elif isinstance(iceberg_type, types.ListType): + element_type = iceberg_to_arrow_type(iceberg_type.element_type) + return pa.list_(element_type) + elif isinstance(iceberg_type, types.StructType): + arrow_fields = [] + for field in iceberg_type.fields: + arrow_type = iceberg_to_arrow_type(field.field_type) + arrow_fields.append(pa.field(field.name, arrow_type, nullable=not field.required)) + return pa.struct(arrow_fields) + elif isinstance(iceberg_type, types.MapType): + key_type = iceberg_to_arrow_type(iceberg_type.key_type) + value_type = iceberg_to_arrow_type(iceberg_type.value_type) + return pa.map_(key_type, value_type) + else: + raise ValueError(f"Unsupported Iceberg type: {iceberg_type}") + + def convert_pyarrow_field(num, field) -> types.NestedField: name = field.name field_type = arrow_to_iceberg_type(field.type) @@ -65,7 +123,55 @@ def convert_pyarrow_field(num, field) -> types.NestedField: required=not field.nullable ) + +def convert_iceberg_field(field) -> pa.Field: + """Convert a PyIceberg NestedField to a PyArrow Field.""" + name = field.name + arrow_type = iceberg_to_arrow_type(field.field_type) + + return pa.field(name, arrow_type, nullable=not field.required) + + def convert_pyarrow_schema(arrow_schema: pa.Schema) -> IcebergSchema: """Convert a PyArrow schema to a PyIceberg schema.""" fields = [convert_pyarrow_field(i, field) for i, field in enumerate(arrow_schema)] return IcebergSchema(*fields) + + +def convert_iceberg_schema(iceberg_schema: IcebergSchema) -> pa.Schema: + """Convert a PyIceberg schema to a PyArrow schema.""" + arrow_fields = [convert_iceberg_field(field) for field in iceberg_schema.fields] + return pa.schema(arrow_fields) + + +def convert_pyarrow_expression(expr: pc.Expression) -> Optional[BooleanExpression]: + if expr is None: + return None + + if expr.op == "and": + return And(convert_expression(expr.args[0]), convert_expression(expr.args[1])) + elif expr.op == "or": + return Or(convert_expression(expr.args[0]), convert_expression(expr.args[1])) + elif expr.op == "not": + return Not(convert_expression(expr.args[0])) + elif expr.op in {"==", "equal"}: + return EqualTo(Reference(expr.args[0].name), expr.args[1].as_py()) + elif expr.op in {"!=", "not_equal"}: + return NotEqualTo(Reference(expr.args[0].name), expr.args[1].as_py()) + elif expr.op in {">", "greater"}: + return GreaterThan(Reference(expr.args[0].name), expr.args[1].as_py()) + elif expr.op == ">=": + return GreaterThanOrEqual(Reference(expr.args[0].name), expr.args[1].as_py()) + elif 
expr.op == "<": + return LessThan(Reference(expr.args[0].name), expr.args[1].as_py()) + elif expr.op == "<=": + return LessThanOrEqual(Reference(expr.args[0].name), expr.args[1].as_py()) + else: + raise ValueError(f"Unsupported operation: {expr.op}") + + +def convert_pyarrow_expressions(exprs: List[pc.Expression]) -> List[BooleanExpression]: + """ + Convert a list of PyArrow expressions to PyIceberg expressions. + """ + return [convert_pyarrow_expression(expr) for expr in exprs] From 3a68311dbbc7c2c718a3384738caf3ef01d2335d Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Mon, 5 May 2025 21:23:05 +0200 Subject: [PATCH 03/25] chore: Minor cleanup thanks to @datancoffee's feedback --- src/tower/_tables.py | 5 ++- src/tower/utils/pyarrow.py | 72 -------------------------------------- 2 files changed, 2 insertions(+), 75 deletions(-) diff --git a/src/tower/_tables.py b/src/tower/_tables.py index bfe653db..6e4b18eb 100644 --- a/src/tower/_tables.py +++ b/src/tower/_tables.py @@ -52,7 +52,7 @@ def to_polars(self) -> pl.LazyFrame: Converts the table to a Polars LazyFrame. This is useful when you understand Polars and you want to do something more complicated. """ - return pl.scan_iceberg(self._table).collect() + return pl.scan_iceberg(self._table) def rows_affected(self) -> RowsAffectedInformation: @@ -142,7 +142,7 @@ def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: def schema(self) -> pa.Schema: # We take an Iceberg Schema and we need to convert it into a PyArrow Schema iceberg_schema = self._table.schema() - return convert_iceberg_schema(iceberg_schema) + return iceberg_schema.as_arrow() class TableReference: @@ -198,7 +198,6 @@ def create_if_not_exists(self, schema: pa.Schema) -> Table: return Table(self._context, table) - def tables( name: str, catalog: str = "default", diff --git a/src/tower/utils/pyarrow.py b/src/tower/utils/pyarrow.py index 6976c794..6b52f1bc 100644 --- a/src/tower/utils/pyarrow.py +++ b/src/tower/utils/pyarrow.py @@ -66,84 +66,12 @@ def arrow_to_iceberg_type(arrow_type): raise ValueError(f"Unsupported Arrow type: {arrow_type}") -def iceberg_to_arrow_type(iceberg_type): - """ - Convert a PyIceberg type to a PyArrow type. 
- """ - if isinstance(iceberg_type, types.BooleanType): - return pa.bool_() - elif isinstance(iceberg_type, types.IntegerType): - return pa.int32() - elif isinstance(iceberg_type, types.LongType): - return pa.int64() - elif isinstance(iceberg_type, types.FloatType): - return pa.float32() - elif isinstance(iceberg_type, types.DoubleType): - return pa.float64() - elif isinstance(iceberg_type, types.StringType): - return pa.string() - elif isinstance(iceberg_type, types.BinaryType): - return pa.binary() - elif isinstance(iceberg_type, types.DateType): - return pa.date32() - elif isinstance(iceberg_type, types.TimestampType): - # Using microsecond precision as default - return pa.timestamp('us') - elif isinstance(iceberg_type, types.TimeType): - # Using microsecond precision as default - return pa.time64('us') - elif isinstance(iceberg_type, types.DecimalType): - return pa.decimal128(iceberg_type.precision, iceberg_type.scale) - elif isinstance(iceberg_type, types.ListType): - element_type = iceberg_to_arrow_type(iceberg_type.element_type) - return pa.list_(element_type) - elif isinstance(iceberg_type, types.StructType): - arrow_fields = [] - for field in iceberg_type.fields: - arrow_type = iceberg_to_arrow_type(field.field_type) - arrow_fields.append(pa.field(field.name, arrow_type, nullable=not field.required)) - return pa.struct(arrow_fields) - elif isinstance(iceberg_type, types.MapType): - key_type = iceberg_to_arrow_type(iceberg_type.key_type) - value_type = iceberg_to_arrow_type(iceberg_type.value_type) - return pa.map_(key_type, value_type) - else: - raise ValueError(f"Unsupported Iceberg type: {iceberg_type}") - - -def convert_pyarrow_field(num, field) -> types.NestedField: - name = field.name - field_type = arrow_to_iceberg_type(field.type) - field_id = num + 1 # Iceberg requires field IDs - - return types.NestedField( - field_id, - name, - field_type, - required=not field.nullable - ) - - -def convert_iceberg_field(field) -> pa.Field: - """Convert a PyIceberg NestedField to a PyArrow Field.""" - name = field.name - arrow_type = iceberg_to_arrow_type(field.field_type) - - return pa.field(name, arrow_type, nullable=not field.required) - - def convert_pyarrow_schema(arrow_schema: pa.Schema) -> IcebergSchema: """Convert a PyArrow schema to a PyIceberg schema.""" fields = [convert_pyarrow_field(i, field) for i, field in enumerate(arrow_schema)] return IcebergSchema(*fields) -def convert_iceberg_schema(iceberg_schema: IcebergSchema) -> pa.Schema: - """Convert a PyIceberg schema to a PyArrow schema.""" - arrow_fields = [convert_iceberg_field(field) for field in iceberg_schema.fields] - return pa.schema(arrow_fields) - - def convert_pyarrow_expression(expr: pc.Expression) -> Optional[BooleanExpression]: if expr is None: return None From 161e7c84182b879b741885ddf6739d5144c8e9f5 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 10:35:49 +0200 Subject: [PATCH 04/25] chore: Add a basic test for tables --- src/tower/_tables.py | 20 +++++++++++----- src/tower/utils/pyarrow.py | 13 +++++++++++ tests/tower/test_tables.py | 48 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 75 insertions(+), 6 deletions(-) create mode 100644 tests/tower/test_tables.py diff --git a/src/tower/_tables.py b/src/tower/_tables.py index 6e4b18eb..7587f3b4 100644 --- a/src/tower/_tables.py +++ b/src/tower/_tables.py @@ -7,8 +7,11 @@ import pyarrow as pa import pyarrow.compute as pc -from pyiceberg.catalog import load_catalog from pyiceberg.table import Table as IcebergTable +from pyiceberg.catalog 
import ( + Catalog, + load_catalog, +) from ._context import TowerContext from .utils.pyarrow import ( @@ -74,7 +77,7 @@ def insert(self, data: pa.Table) -> TTable: TTable: The table with the inserted rows. """ self._table.append(data) - self._stats.inserts += data.num_rows() + self._stats.inserts += data.num_rows return self @@ -146,9 +149,9 @@ def schema(self) -> pa.Schema: class TableReference: - def __init__(self, ctx: TowerContext, catalog_name: str, name: str, namespace: Optional[str] = None): + def __init__(self, ctx: TowerContext, catalog: Catalog, name: str, namespace: Optional[str] = None): self._context = ctx - self._catalog = load_catalog(catalog_name) + self._catalog = catalog self._name = name self._namespace = namespace @@ -200,7 +203,7 @@ def create_if_not_exists(self, schema: pa.Schema) -> Table: def tables( name: str, - catalog: str = "default", + catalog: Union[str, Catalog] = "default", namespace: Optional[str] = None ) -> TableReference: """ @@ -209,11 +212,16 @@ def tables( Args: `name` (str): The name of the table to load. - `catalog` (str): The name of the catalog to use. "default" by default. + `catalog` (Union[str, Catalog]): The name of the catalog or the actual + catalog to use. "default" is the default value. You can pass in an + actual catalog object for testing purposes. `namespace` (Optional[str]): The namespace in which to load the table. Returns: TableReference: A reference to a table to be resolved with `create` or `load` """ + if isinstance(catalog, str): + catalog = load_catalog(catalog) + ctx = TowerContext.build() return TableReference(ctx, catalog, name, namespace) diff --git a/src/tower/utils/pyarrow.py b/src/tower/utils/pyarrow.py index 6b52f1bc..a8c545f4 100644 --- a/src/tower/utils/pyarrow.py +++ b/src/tower/utils/pyarrow.py @@ -66,6 +66,19 @@ def arrow_to_iceberg_type(arrow_type): raise ValueError(f"Unsupported Arrow type: {arrow_type}") +def convert_pyarrow_field(num, field) -> types.NestedField: + name = field.name + field_type = arrow_to_iceberg_type(field.type) + field_id = num + 1 # Iceberg requires field IDs + + return types.NestedField( + field_id, + name, + field_type, + required=not field.nullable + ) + + def convert_pyarrow_schema(arrow_schema: pa.Schema) -> IcebergSchema: """Convert a PyArrow schema to a PyIceberg schema.""" fields = [convert_pyarrow_field(i, field) for i, field in enumerate(arrow_schema)] diff --git a/tests/tower/test_tables.py b/tests/tower/test_tables.py new file mode 100644 index 00000000..0da67e73 --- /dev/null +++ b/tests/tower/test_tables.py @@ -0,0 +1,48 @@ +import pytest +import shutil +import datetime + +import polars as pl +import pyarrow as pa +from pyiceberg.catalog.memory import InMemoryCatalog + +@pytest.fixture +def in_memory_catalog(): + catalog = InMemoryCatalog("test.in_memory.catalog", warehouse="file:///tmp/iceberg") + + # Yield the fixture which actually runs the test + yield catalog + + # Clean up after the catalog + shutil.rmtree("/tmp/iceberg") + + +def test_reading_and_writing_to_tables(in_memory_catalog): + schema = pa.schema([ + pa.field("id", pa.int64()), + pa.field("name", pa.string()), + pa.field("age", pa.int32()), + pa.field("created_at", pa.timestamp("ms")), + ]) + + import tower + ref = tower.tables("some_table", catalog=in_memory_catalog) + table = ref.create_if_not_exists(schema) + + data_with_schema = pa.Table.from_pylist([ + {"id": 1, "name": "Alice", "age": 30, "created_at": datetime.datetime(2023, 1, 1, 0, 0, 0)}, + {"id": 2, "name": "Bob", "age": 25, "created_at": 
datetime.datetime(2023, 1, 2, 0, 0, 0)}, + {"id": 3, "name": "Charlie", "age": 35, "created_at": datetime.datetime(2023, 1, 3, 0, 0, 0)}, + ], schema=schema) + + # If we write some data to the table, that should be...OK. + table = table.insert(data_with_schema) + assert table is not None + assert table.rows_affected().inserts == 3 + + # Now we should be able to read from the table too. + df = table.to_polars() + + # Assert that the DF actually can do something useful. + avg_age = df.select(pl.mean("age").alias("mean_age")).collect().item() + assert avg_age == 30.0 From 3ea5d6ba0821c9b4779584b2323126a39e6b30ca Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 15:10:01 +0200 Subject: [PATCH 05/25] chore: Updated implementation and code for Tower tables --- src/tower/_tables.py | 19 ++++- src/tower/utils/pyarrow.py | 148 ++++++++++++++++++++++++++++++------- tests/tower/test_tables.py | 124 ++++++++++++++++++++++++++++++- 3 files changed, 260 insertions(+), 31 deletions(-) diff --git a/src/tower/_tables.py b/src/tower/_tables.py index 7587f3b4..70cfa30b 100644 --- a/src/tower/_tables.py +++ b/src/tower/_tables.py @@ -127,11 +127,11 @@ def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: """ if isinstance(filters, list): # We need to covnert the pc.Expression into PyIceberg - next_filters = convert_pyarrow_expression(filters) + next_filters = convert_pyarrow_expressions(filters) filters = next_filters self._table.delete( - delete_filters=filters, + delete_filter=filters, # We want this to always be the case. Not sure why you wouldn't? case_sensitive=True, @@ -142,12 +142,27 @@ def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: return self + def schema(self) -> pa.Schema: # We take an Iceberg Schema and we need to convert it into a PyArrow Schema iceberg_schema = self._table.schema() return iceberg_schema.as_arrow() + def column(self, name: str) -> pa.compute.Expression: + """ + Returns a column from the table. This is useful when you want to + perform some operations on the column. 
+ """ + field = self.schema().field(name) + + if field is None: + raise ValueError(f"Column {name} not found in table schema") + + # We need to convert the PyArrow field into pa.compute.Expression + return pa.compute.field(name) + + class TableReference: def __init__(self, ctx: TowerContext, catalog: Catalog, name: str, namespace: Optional[str] = None): self._context = ctx diff --git a/src/tower/utils/pyarrow.py b/src/tower/utils/pyarrow.py index a8c545f4..a0ad414c 100644 --- a/src/tower/utils/pyarrow.py +++ b/src/tower/utils/pyarrow.py @@ -1,4 +1,4 @@ -from typing import Optional, List +from typing import Any, Optional, List import pyarrow as pa import pyarrow.compute as pc @@ -85,34 +85,132 @@ def convert_pyarrow_schema(arrow_schema: pa.Schema) -> IcebergSchema: return IcebergSchema(*fields) +def extract_field_and_literal(expr: pc.Expression) -> tuple[str, Any]: + """Extract field name and literal value from a comparison expression.""" + # First, convert the expression to a string and parse it + expr_str = str(expr) + + # PyArrow expression strings look like: "(field_name == literal)" or similar + # Need to determine the operator and then split accordingly + operators = ["==", "!=", ">", ">=", "<", "<="] + op_used = None + for op in operators: + if op in expr_str: + op_used = op + break + + if not op_used: + raise ValueError(f"Could not find comparison operator in expression: {expr_str}") + + # Remove parentheses and split by operator + expr_clean = expr_str.strip("()") + parts = expr_clean.split(op_used) + if len(parts) != 2: + raise ValueError(f"Expected binary comparison in expression: {expr_str}") + + # Determine which part is the field and which is the literal + field_name = None + literal_value = None + + # Clean up the parts + left = parts[0].strip() + right = parts[1].strip() + + # Typically field name doesn't have quotes, literals (strings) do + if left.startswith('"') or left.startswith("'"): + # Right side is the field + field_name = right + # Extract the literal value - this is a simplification + literal_value = left.strip('"\'') + else: + # Left side is the field + field_name = left + # Extract the literal value - this is a simplification + literal_value = right.strip('"\'') + + # Try to convert numeric literals + try: + if "." 
in literal_value: + literal_value = float(literal_value) + else: + literal_value = int(literal_value) + except ValueError: + # Keep as string if not numeric + pass + + return field_name, literal_value + def convert_pyarrow_expression(expr: pc.Expression) -> Optional[BooleanExpression]: + """Convert a PyArrow compute expression to a PyIceberg boolean expression.""" if expr is None: return None - - if expr.op == "and": - return And(convert_expression(expr.args[0]), convert_expression(expr.args[1])) - elif expr.op == "or": - return Or(convert_expression(expr.args[0]), convert_expression(expr.args[1])) - elif expr.op == "not": - return Not(convert_expression(expr.args[0])) - elif expr.op in {"==", "equal"}: - return EqualTo(Reference(expr.args[0].name), expr.args[1].as_py()) - elif expr.op in {"!=", "not_equal"}: - return NotEqualTo(Reference(expr.args[0].name), expr.args[1].as_py()) - elif expr.op in {">", "greater"}: - return GreaterThan(Reference(expr.args[0].name), expr.args[1].as_py()) - elif expr.op == ">=": - return GreaterThanOrEqual(Reference(expr.args[0].name), expr.args[1].as_py()) - elif expr.op == "<": - return LessThan(Reference(expr.args[0].name), expr.args[1].as_py()) - elif expr.op == "<=": - return LessThanOrEqual(Reference(expr.args[0].name), expr.args[1].as_py()) - else: - raise ValueError(f"Unsupported operation: {expr.op}") + + # Handle the expression based on its string representation + expr_str = str(expr) + + # Handle logical operations + if "and" in expr_str.lower() and isinstance(expr, pc.Expression): + # This is a simplification - in real code, you'd need to parse the expression + # to extract the sub-expressions properly + left_expr = None # You'd need to extract this + right_expr = None # You'd need to extract this + return And( + convert_pyarrow_expression(left_expr), + convert_pyarrow_expression(right_expr) + ) + elif "or" in expr_str.lower() and isinstance(expr, pc.Expression): + # Similar simplification + left_expr = None # You'd need to extract this + right_expr = None # You'd need to extract this + return Or( + convert_pyarrow_expression(left_expr), + convert_pyarrow_expression(right_expr) + ) + elif "not" in expr_str.lower() and isinstance(expr, pc.Expression): + # Similar simplification + inner_expr = None # You'd need to extract this + return Not(convert_pyarrow_expression(inner_expr)) + + # Handle comparison operations + try: + if "==" in expr_str: + field_name, value = extract_field_and_literal(expr) + return EqualTo(Reference(field_name), value) + elif "!=" in expr_str: + field_name, value = extract_field_and_literal(expr) + return NotEqualTo(Reference(field_name), value) + elif ">=" in expr_str: + field_name, value = extract_field_and_literal(expr) + return GreaterThanOrEqual(Reference(field_name), value) + elif ">" in expr_str: + field_name, value = extract_field_and_literal(expr) + return GreaterThan(Reference(field_name), value) + elif "<=" in expr_str: + field_name, value = extract_field_and_literal(expr) + return LessThanOrEqual(Reference(field_name), value) + elif "<" in expr_str: + field_name, value = extract_field_and_literal(expr) + return LessThan(Reference(field_name), value) + else: + raise ValueError(f"Unsupported expression: {expr_str}") + except Exception as e: + raise ValueError(f"Failed to convert expression '{expr_str}': {str(e)}") -def convert_pyarrow_expressions(exprs: List[pc.Expression]) -> List[BooleanExpression]: +def convert_pyarrow_expressions(exprs: List[pc.Expression]) -> BooleanExpression: """ - Convert a list of PyArrow 
expressions to PyIceberg expressions. + Convert a list of PyArrow expressions to a single PyIceberg expression. + Multiple expressions are combined with AND. """ - return [convert_pyarrow_expression(expr) for expr in exprs] + if not exprs: + raise ValueError("No expressions provided") + + if len(exprs) == 1: + return convert_pyarrow_expression(exprs[0]) + + # Combine multiple expressions with AND + result = convert_pyarrow_expression(exprs[0]) + for expr in exprs[1:]: + result = And(result, convert_pyarrow_expression(expr)) + + return result diff --git a/tests/tower/test_tables.py b/tests/tower/test_tables.py index 0da67e73..eab04f89 100644 --- a/tests/tower/test_tables.py +++ b/tests/tower/test_tables.py @@ -1,20 +1,39 @@ import pytest import shutil import datetime +import tempfile +import pathlib +from urllib.parse import urljoin +from urllib.request import pathname2url import polars as pl import pyarrow as pa from pyiceberg.catalog.memory import InMemoryCatalog +# Imports the library under test +import tower + +def get_temp_dir(): + """Create a temporary directory and return its file:// URL.""" + # Create a temporary directory that will be automatically cleaned up + temp_dir = tempfile.TemporaryDirectory() + abs_path = pathlib.Path(temp_dir.name).absolute() + file_url = urljoin('file:', pathname2url(str(abs_path))) + + # Return both the URL and the path to the temporary directory + return file_url, abs_path + + @pytest.fixture def in_memory_catalog(): - catalog = InMemoryCatalog("test.in_memory.catalog", warehouse="file:///tmp/iceberg") + file_url, temp_dir = get_temp_dir() + catalog = InMemoryCatalog("test.in_memory.catalog", warehouse=file_url) # Yield the fixture which actually runs the test yield catalog # Clean up after the catalog - shutil.rmtree("/tmp/iceberg") + shutil.rmtree(temp_dir) def test_reading_and_writing_to_tables(in_memory_catalog): @@ -25,8 +44,7 @@ def test_reading_and_writing_to_tables(in_memory_catalog): pa.field("created_at", pa.timestamp("ms")), ]) - import tower - ref = tower.tables("some_table", catalog=in_memory_catalog) + ref = tower.tables("users", catalog=in_memory_catalog) table = ref.create_if_not_exists(schema) data_with_schema = pa.Table.from_pylist([ @@ -46,3 +64,101 @@ def test_reading_and_writing_to_tables(in_memory_catalog): # Assert that the DF actually can do something useful. avg_age = df.select(pl.mean("age").alias("mean_age")).collect().item() assert avg_age == 30.0 + +def test_upsert_to_tables(in_memory_catalog): + schema = pa.schema([ + pa.field("id", pa.int64()), + pa.field("username", pa.string()), + pa.field("name", pa.string()), + pa.field("age", pa.int32()), + pa.field("created_at", pa.timestamp("ms")), + ]) + + # First we'll insert some data into the relevant table. + ref = tower.tables("users", catalog=in_memory_catalog) + table = ref.create_if_not_exists(schema) + + data_with_schema = pa.Table.from_pylist([ + {"id": 1, "username": "alicea", "name": "Alice", "age": 30, "created_at": datetime.datetime(2023, 1, 1, 0, 0, 0)}, + {"id": 2, "username": "bobb", "name": "Bob", "age": 25, "created_at": datetime.datetime(2023, 1, 2, 0, 0, 0)}, + {"id": 3, "username": "charliec", "name": "Charlie", "age": 35, "created_at": datetime.datetime(2023, 1, 3, 0, 0, 0)}, + ], schema=schema) + + # Make sure that we can actually insert the data into the table. + table = table.insert(data_with_schema) + assert table is not None + assert table.rows_affected().inserts == 3 + + # Now we'll update records in the table. 
+    data_with_schema = pa.Table.from_pylist([
+        {"id": 2, "username": "bobb", "name": "Bob", "age": 26, "created_at": datetime.datetime(2023, 1, 2, 0, 0, 0)},
+    ], schema=schema)
+
+    # And make sure we can upsert the data.
+    table = table.upsert(data_with_schema, join_cols=["username"])
+    assert table.rows_affected().updates == 1
+
+    # Now let's read from the table and see what we get back out.
+    df = table.to_polars()
+    bob_rows = df.filter(pl.col("username") == "bobb")
+    res = bob_rows.select("age").collect()
+
+    # The age should match what we updated the relevant record to
+    assert res["age"].item() == 26
+
+def test_delete_from_tables(in_memory_catalog):
+    schema = pa.schema([
+        pa.field("id", pa.int64()),
+        pa.field("username", pa.string()),
+        pa.field("name", pa.string()),
+        pa.field("age", pa.int32()),
+        pa.field("created_at", pa.timestamp("ms")),
+    ])
+
+    # First we'll insert some data into the relevant table.
+    ref = tower.tables("users", catalog=in_memory_catalog)
+    table = ref.create_if_not_exists(schema)
+
+    data_with_schema = pa.Table.from_pylist([
+        {"id": 1, "username": "alicea", "name": "Alice", "age": 30, "created_at": datetime.datetime(2023, 1, 1, 0, 0, 0)},
+        {"id": 2, "username": "bobb", "name": "Bob", "age": 25, "created_at": datetime.datetime(2023, 1, 2, 0, 0, 0)},
+        {"id": 3, "username": "charliec", "name": "Charlie", "age": 35, "created_at": datetime.datetime(2023, 1, 3, 0, 0, 0)},
+    ], schema=schema)
+
+    # Make sure that we can actually insert the data into the table.
+    table = table.insert(data_with_schema)
+    assert table is not None
+    assert table.rows_affected().inserts == 3
+
+    # Perform the underlying delete from the table...
+    table.delete(filters=[
+        table.column("username") == "bobb"
+    ])
+
+    # ...and let's make sure that record is actually gone.
+    df = table.to_polars()
+
+    # Count all the rows in the table, should be 2.
+    all_rows = df.collect()
+    assert all_rows.height == 2
+
+def test_getting_schemas_for_tables(in_memory_catalog):
+    original_schema = pa.schema([
+        pa.field("id", pa.int64()),
+        pa.field("username", pa.string()),
+        pa.field("name", pa.string()),
+        pa.field("age", pa.int32()),
+        pa.field("created_at", pa.timestamp("ms")),
+    ])
+
+    # First we'll insert some data into the relevant table.
+    ref = tower.tables("users", catalog=in_memory_catalog)
+    table = ref.create_if_not_exists(original_schema)
+
+    new_schema = table.schema()
+
+    # Should have five columns
+    assert len(new_schema) == 5
+    assert new_schema.field("id") is not None
+    assert new_schema.field("age") is not None
+    assert new_schema.field("created_at") is not None

From b4f1bad05476ddf0ac4463a557481e29376498bd Mon Sep 17 00:00:00 2001
From: Brad Heller
Date: Tue, 6 May 2025 15:20:37 +0200
Subject: [PATCH 06/25] chore: Re-export imported modules to make access easier.

---
 src/tower/polars.py        | 10 ++++++++++
 src/tower/pyarrow.py       |  6 ++++++
 src/tower/pyiceberg.py     | 17 +++++++++++++++
 tests/tower/test_tables.py |  7 ++++---
 4 files changed, 37 insertions(+), 3 deletions(-)
 create mode 100644 src/tower/polars.py
 create mode 100644 src/tower/pyarrow.py
 create mode 100644 src/tower/pyiceberg.py

diff --git a/src/tower/polars.py b/src/tower/polars.py
new file mode 100644
index 00000000..b770c21c
--- /dev/null
+++ b/src/tower/polars.py
@@ -0,0 +1,10 @@
+try:
+    import polars as _polars
+    # Re-export everything from polars
+    from polars import *
+
+    # Or if you prefer, you can be explicit about what you re-export
+    # from polars import DataFrame, Series, etc.
+except ImportError:
+    _polars = None
+    # Set specific names to None if you're using explicit imports
diff --git a/src/tower/pyarrow.py b/src/tower/pyarrow.py
new file mode 100644
index 00000000..02125566
--- /dev/null
+++ b/src/tower/pyarrow.py
@@ -0,0 +1,6 @@
+try:
+    import pyarrow as _pyarrow
+    # Re-export everything
+    from pyarrow import *
+except ImportError:
+    _pyarrow = None
diff --git a/src/tower/pyiceberg.py b/src/tower/pyiceberg.py
new file mode 100644
index 00000000..ba05f294
--- /dev/null
+++ b/src/tower/pyiceberg.py
@@ -0,0 +1,17 @@
+try:
+    import pyiceberg as _pyiceberg
+    # Re-export everything
+    from pyiceberg import *
+except ImportError:
+    _pyiceberg = None
+
+
+# Dynamic dispatch for submodules, as relevant.
+def __getattr__(name):
+    """Forward attribute access to the original module."""
+    return getattr(_pyiceberg, name)
+
+# Optionally, also set up the module to handle subpackage imports
+# This requires Python 3.7+
+def __dir__():
+    return dir(_pyiceberg)
diff --git a/tests/tower/test_tables.py b/tests/tower/test_tables.py
index eab04f89..3ef0b027 100644
--- a/tests/tower/test_tables.py
+++ b/tests/tower/test_tables.py
@@ -6,9 +6,10 @@ from urllib.parse import urljoin
 from urllib.request import pathname2url
 
-import polars as pl
-import pyarrow as pa
-from pyiceberg.catalog.memory import InMemoryCatalog
+# We import all the things we need from Tower.
+import tower.polars as pl
+import tower.pyarrow as pa
+from tower.pyiceberg.catalog.memory import InMemoryCatalog
 
 # Imports the library under test
 import tower

From 35c90f6f3e635441c5a0da8c4a4d8f84172961e8 Mon Sep 17 00:00:00 2001
From: Brad Heller
Date: Tue, 6 May 2025 15:31:42 +0200
Subject: [PATCH 07/25] chore: Fix setup for test enviros in GitHub Actions

---
 .github/workflows/test-python.yml |  16 ++--
 pyproject.toml                    |  13 ++-
 uv.lock                           | 146 ++++++++++++++++++++++++++++--
 3 files changed, 153 insertions(+), 22 deletions(-)

diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml
index 3873e501..cd22685b 100644
--- a/.github/workflows/test-python.yml
+++ b/.github/workflows/test-python.yml
@@ -28,16 +28,16 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up Python
-        uses: actions/setup-python@v5
-
       - name: Install the latest version of uv
         uses: astral-sh/setup-uv@v6
 
-      - name: Install dependencies
-        if: github.ref_name != 'main'
-        run: uv sync --all-extras
+      - name: "Set up Python"
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: ".python-version"
+
+      - name: Install the project
+        run: uv sync --locked --all-extras --dev
 
       - name: Run tests
-        if: github.ref_name != 'main'
-        run: uv run -m pytest --tb=short --disable-warnings
+        run: uv run pytest tests
diff --git a/pyproject.toml b/pyproject.toml
index 90b38580..7320a5d0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -45,11 +45,6 @@ dependencies = [
 ai = ["huggingface-hub>=0.30.2", "ollama>=0.4.7"]
 iceberg = ["polars>=1.27.1", "pyarrow>=19.0.1", "pyiceberg>=0.9.0"]
 all = ["tower[ai,iceberg]"]
-dev = [
-    "openapi-python-client>=0.12.1",
-    "pytest>=8.3.5",
-    "pytest-httpx>=0.35.0",
-]
 
 [tool.maturin]
 bindings = "bin"
@@ -62,3 +57,11 @@ include = ["rust-toolchain.toml"]
 
 [tool.uv.sources]
 tower = { workspace = true }
+
+[dependency-groups]
+dev = [
+    "openapi-python-client>=0.12.1",
+    "pytest>=8.3.5",
+    "pytest-httpx>=0.35.0",
+    "pyiceberg[sql-sqlite]>=0.9.0",
+]
diff --git a/uv.lock b/uv.lock
index 1b9f95c2..49909738 100644
--- a/uv.lock
+++ b/uv.lock
@@ -175,6 +175,68 @@ wheels = [
     { url =
"https://files.pythonhosted.org/packages/44/4b/e0cfc1a6f17e990f3e64b7d941ddc4acdc7b19d6edd51abf495f32b1a9e4/fsspec-2025.3.2-py3-none-any.whl", hash = "sha256:2daf8dc3d1dfa65b6aa37748d112773a7a08416f6c70d96b264c96476ecaf711", size = 194435, upload_time = "2025-03-31T15:27:07.028Z" }, ] +[[package]] +name = "greenlet" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/74/907bb43af91782e0366b0960af62a8ce1f9398e4291cac7beaeffbee0c04/greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7", size = 184475, upload_time = "2025-04-22T14:40:18.206Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/3e/6332bb2d1e43ec6270e0b97bf253cd704691ee55e4e52196cb7da8f774e9/greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0", size = 267364, upload_time = "2025-04-22T14:25:26.993Z" }, + { url = "https://files.pythonhosted.org/packages/73/c1/c47cc96878c4eda993a2deaba15af3cfdc87cf8e2e3c4c20726dea541a8c/greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157", size = 625721, upload_time = "2025-04-22T14:53:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/c8/65/df1ff1a505a62b08d31da498ddc0c9992e9c536c01944f8b800a7cf17ac6/greenlet-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1a40a17e2c7348f5eee5d8e1b4fa6a937f0587eba89411885a36a8e1fc29bd2", size = 636983, upload_time = "2025-04-22T14:54:55.568Z" }, + { url = "https://files.pythonhosted.org/packages/e8/1d/29944dcaaf5e482f7bff617de15f29e17cc0e74c7393888f8a43d7f6229e/greenlet-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5193135b3a8d0017cb438de0d49e92bf2f6c1c770331d24aa7500866f4db4017", size = 632880, upload_time = "2025-04-22T15:04:32.187Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c6/6c0891fd775b4fc5613593181526ba282771682dfe7bd0206d283403bcbb/greenlet-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639a94d001fe874675b553f28a9d44faed90f9864dc57ba0afef3f8d76a18b04", size = 631638, upload_time = "2025-04-22T14:27:02.856Z" }, + { url = "https://files.pythonhosted.org/packages/c0/50/3d8cadd4dfab17ef72bf0476cc2dacab368273ed29a79bbe66c36c6007a4/greenlet-3.2.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fe303381e7e909e42fb23e191fc69659910909fdcd056b92f6473f80ef18543", size = 580577, upload_time = "2025-04-22T14:25:54.509Z" }, + { url = "https://files.pythonhosted.org/packages/a5/fe/bb0fc421318c69a840e5b98fdeea29d8dcb38f43ffe8b49664aeb10cc3dc/greenlet-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:72c9b668454e816b5ece25daac1a42c94d1c116d5401399a11b77ce8d883110c", size = 1109788, upload_time = "2025-04-22T14:58:54.243Z" }, + { url = "https://files.pythonhosted.org/packages/89/e9/db23a39effaef855deac9083a9054cbe34e1623dcbabed01e34a9d4174c7/greenlet-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6079ae990bbf944cf66bea64a09dcb56085815630955109ffa98984810d71565", size = 1133412, upload_time = "2025-04-22T14:28:08.284Z" }, + { url = "https://files.pythonhosted.org/packages/6a/86/c33905264b43fe4806720f60124254a149857b42c1bf01bd6e136883c99f/greenlet-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e63cd2035f49376a23611fbb1643f78f8246e9d4dfd607534ec81b175ce582c2", 
size = 294958, upload_time = "2025-04-22T15:02:15.755Z" }, + { url = "https://files.pythonhosted.org/packages/26/80/a6ee52c59f75a387ec1f0c0075cf7981fb4644e4162afd3401dabeaa83ca/greenlet-3.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:aa30066fd6862e1153eaae9b51b449a6356dcdb505169647f69e6ce315b9468b", size = 268609, upload_time = "2025-04-22T14:26:58.208Z" }, + { url = "https://files.pythonhosted.org/packages/ad/11/bd7a900629a4dd0e691dda88f8c2a7bfa44d0c4cffdb47eb5302f87a30d0/greenlet-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0f3a0a67786facf3b907a25db80efe74310f9d63cc30869e49c79ee3fcef7e", size = 628776, upload_time = "2025-04-22T14:53:43.036Z" }, + { url = "https://files.pythonhosted.org/packages/46/f1/686754913fcc2707addadf815c884fd49c9f00a88e6dac277a1e1a8b8086/greenlet-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64a4d0052de53ab3ad83ba86de5ada6aeea8f099b4e6c9ccce70fb29bc02c6a2", size = 640827, upload_time = "2025-04-22T14:54:57.409Z" }, + { url = "https://files.pythonhosted.org/packages/03/74/bef04fa04125f6bcae2c1117e52f99c5706ac6ee90b7300b49b3bc18fc7d/greenlet-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852ef432919830022f71a040ff7ba3f25ceb9fe8f3ab784befd747856ee58530", size = 636752, upload_time = "2025-04-22T15:04:33.707Z" }, + { url = "https://files.pythonhosted.org/packages/aa/08/e8d493ab65ae1e9823638b8d0bf5d6b44f062221d424c5925f03960ba3d0/greenlet-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4818116e75a0dd52cdcf40ca4b419e8ce5cb6669630cb4f13a6c384307c9543f", size = 635993, upload_time = "2025-04-22T14:27:04.408Z" }, + { url = "https://files.pythonhosted.org/packages/1f/9d/3a3a979f2b019fb756c9a92cd5e69055aded2862ebd0437de109cf7472a2/greenlet-3.2.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9afa05fe6557bce1642d8131f87ae9462e2a8e8c46f7ed7929360616088a3975", size = 583927, upload_time = "2025-04-22T14:25:55.896Z" }, + { url = "https://files.pythonhosted.org/packages/59/21/a00d27d9abb914c1213926be56b2a2bf47999cf0baf67d9ef5b105b8eb5b/greenlet-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5c12f0d17a88664757e81a6e3fc7c2452568cf460a2f8fb44f90536b2614000b", size = 1112891, upload_time = "2025-04-22T14:58:55.808Z" }, + { url = "https://files.pythonhosted.org/packages/20/c7/922082bf41f0948a78d703d75261d5297f3db894758317409e4677dc1446/greenlet-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbb4e1aa2000852937dd8f4357fb73e3911da426df8ca9b8df5db231922da474", size = 1138318, upload_time = "2025-04-22T14:28:09.451Z" }, + { url = "https://files.pythonhosted.org/packages/34/d7/e05aa525d824ec32735ba7e66917e944a64866c1a95365b5bd03f3eb2c08/greenlet-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:cb5ee928ce5fedf9a4b0ccdc547f7887136c4af6109d8f2fe8e00f90c0db47f5", size = 295407, upload_time = "2025-04-22T14:58:42.319Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d1/e4777b188a04726f6cf69047830d37365b9191017f54caf2f7af336a6f18/greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea", size = 270381, upload_time = "2025-04-22T14:25:43.69Z" }, + { url = "https://files.pythonhosted.org/packages/59/e7/b5b738f5679247ddfcf2179c38945519668dced60c3164c20d55c1a7bb4a/greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8", size = 637195, upload_time = "2025-04-22T14:53:44.563Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9f/57968c88a5f6bc371364baf983a2e5549cca8f503bfef591b6dd81332cbc/greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840", size = 651381, upload_time = "2025-04-22T14:54:59.439Z" }, + { url = "https://files.pythonhosted.org/packages/40/81/1533c9a458e9f2ebccb3ae22f1463b2093b0eb448a88aac36182f1c2cd3d/greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9", size = 646110, upload_time = "2025-04-22T15:04:35.739Z" }, + { url = "https://files.pythonhosted.org/packages/06/66/25f7e4b1468ebe4a520757f2e41c2a36a2f49a12e963431b82e9f98df2a0/greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12", size = 648070, upload_time = "2025-04-22T14:27:05.976Z" }, + { url = "https://files.pythonhosted.org/packages/d7/4c/49d366565c4c4d29e6f666287b9e2f471a66c3a3d8d5066692e347f09e27/greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22", size = 603816, upload_time = "2025-04-22T14:25:57.224Z" }, + { url = "https://files.pythonhosted.org/packages/04/15/1612bb61506f44b6b8b6bebb6488702b1fe1432547e95dda57874303a1f5/greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1", size = 1119572, upload_time = "2025-04-22T14:58:58.277Z" }, + { url = "https://files.pythonhosted.org/packages/cc/2f/002b99dacd1610e825876f5cbbe7f86740aa2a6b76816e5eca41c8457e85/greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145", size = 1147442, upload_time = "2025-04-22T14:28:11.243Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ba/82a2c3b9868644ee6011da742156247070f30e952f4d33f33857458450f2/greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d", size = 296207, upload_time = "2025-04-22T14:54:40.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/2a/581b3808afec55b2db838742527c40b4ce68b9b64feedff0fd0123f4b19a/greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac", size = 269119, upload_time = "2025-04-22T14:25:01.798Z" }, + { url = "https://files.pythonhosted.org/packages/b0/f3/1c4e27fbdc84e13f05afc2baf605e704668ffa26e73a43eca93e1120813e/greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437", size = 637314, upload_time = "2025-04-22T14:53:46.214Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1a/9fc43cb0044f425f7252da9847893b6de4e3b20c0a748bce7ab3f063d5bc/greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a", size = 651421, upload_time = "2025-04-22T14:55:00.852Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/65/d47c03cdc62c6680206b7420c4a98363ee997e87a5e9da1e83bd7eeb57a8/greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c", size = 645789, upload_time = "2025-04-22T15:04:37.702Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/0faf8bee1b106c241780f377b9951dd4564ef0972de1942ef74687aa6bba/greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982", size = 648262, upload_time = "2025-04-22T14:27:07.55Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a8/73305f713183c2cb08f3ddd32eaa20a6854ba9c37061d682192db9b021c3/greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07", size = 606770, upload_time = "2025-04-22T14:25:58.34Z" }, + { url = "https://files.pythonhosted.org/packages/c3/05/7d726e1fb7f8a6ac55ff212a54238a36c57db83446523c763e20cd30b837/greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95", size = 1117960, upload_time = "2025-04-22T14:59:00.373Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9f/2b6cb1bd9f1537e7b08c08705c4a1d7bd4f64489c67d102225c4fd262bda/greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123", size = 1145500, upload_time = "2025-04-22T14:28:12.441Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f6/339c6e707062319546598eb9827d3ca8942a3eccc610d4a54c1da7b62527/greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495", size = 295994, upload_time = "2025-04-22T14:50:44.796Z" }, + { url = "https://files.pythonhosted.org/packages/f1/72/2a251d74a596af7bb1717e891ad4275a3fd5ac06152319d7ad8c77f876af/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526", size = 629889, upload_time = "2025-04-22T14:53:48.434Z" }, + { url = "https://files.pythonhosted.org/packages/29/2e/d7ed8bf97641bf704b6a43907c0e082cdf44d5bc026eb8e1b79283e7a719/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5", size = 635261, upload_time = "2025-04-22T14:55:02.258Z" }, + { url = "https://files.pythonhosted.org/packages/1e/75/802aa27848a6fcb5e566f69c64534f572e310f0f12d41e9201a81e741551/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32", size = 632523, upload_time = "2025-04-22T15:04:39.221Z" }, + { url = "https://files.pythonhosted.org/packages/56/09/f7c1c3bab9b4c589ad356503dd71be00935e9c4db4db516ed88fc80f1187/greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc", size = 628816, upload_time = "2025-04-22T14:27:08.869Z" }, + { url = "https://files.pythonhosted.org/packages/79/e0/1bb90d30b5450eac2dffeaac6b692857c4bd642c21883b79faa8fa056cf2/greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb", size = 
593687, upload_time = "2025-04-22T14:25:59.676Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b5/adbe03c8b4c178add20cc716021183ae6b0326d56ba8793d7828c94286f6/greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8", size = 1105754, upload_time = "2025-04-22T14:59:02.585Z" }, + { url = "https://files.pythonhosted.org/packages/39/93/84582d7ef38dec009543ccadec6ab41079a6cbc2b8c0566bcd07bf1aaf6c/greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d", size = 1125160, upload_time = "2025-04-22T14:28:13.975Z" }, + { url = "https://files.pythonhosted.org/packages/01/e6/f9d759788518a6248684e3afeb3691f3ab0276d769b6217a1533362298c8/greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189", size = 269897, upload_time = "2025-04-22T14:27:14.044Z" }, + { url = "https://files.pythonhosted.org/packages/c7/04/0a47c2e2d7ded33615afbad52919dac5f065eddd917544f606a6fabb61e7/greenlet-3.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:17964c246d4f6e1327edd95e2008988a8995ae3a7732be2f9fc1efed1f1cdf8c", size = 266158, upload_time = "2025-04-22T14:26:40.269Z" }, + { url = "https://files.pythonhosted.org/packages/6a/50/4aa63d2ce56000e281a497b1325692874b317240fb65263f3df58673f64a/greenlet-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b4ec7f65f0e4a1500ac475c9343f6cc022b2363ebfb6e94f416085e40dea15", size = 623856, upload_time = "2025-04-22T14:53:49.632Z" }, + { url = "https://files.pythonhosted.org/packages/96/ff/ba4b4f130caee5ab5c40183a6e9ae63daede0e6ab5c00e4c3457074cba5b/greenlet-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38d53cf268da963869aa25a6e4cc84c1c69afc1ae3391738b2603d110749d01", size = 635655, upload_time = "2025-04-22T14:55:03.687Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0e/10287f42ba82a311e8697febe29ede14087f901bda09329ad1fe03fb2511/greenlet-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a7490f74e8aabc5f29256765a99577ffde979920a2db1f3676d265a3adba41", size = 630938, upload_time = "2025-04-22T15:04:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a8/f5b76f63335e5efd05e41b73ffa399b409aedd6dbc729388c2794d9bc680/greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece", size = 630215, upload_time = "2025-04-22T14:27:10.047Z" }, + { url = "https://files.pythonhosted.org/packages/a4/e9/07570eef5155efdea7602a5cca84bc406415928bdd109158df41236493a3/greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b", size = 579081, upload_time = "2025-04-22T14:26:01.22Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a8/3d51ada057317e86e2b052fded6288030f6d1ca36de6077b352a72c32c70/greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9", size = 1108305, upload_time = "2025-04-22T14:59:04.583Z" }, + { url = "https://files.pythonhosted.org/packages/c8/33/78745dfdceb4cf10fb831c33f5a4c2a1125026dfa1beac3a2df912c8ac61/greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545", size = 
1132382, upload_time = "2025-04-22T14:28:15.723Z" }, + { url = "https://files.pythonhosted.org/packages/19/8f/98a478e9285b82046d3167c30b4d04385bec441493c2155c18c701c5879b/greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b", size = 277712, upload_time = "2025-04-22T15:09:57.479Z" }, + { url = "https://files.pythonhosted.org/packages/37/c2/eb1bc32182063e145a28678d73c79e6915c1c43c35abdb7baa2b31cf3aca/greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9", size = 294835, upload_time = "2025-04-22T15:06:06.809Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -735,6 +797,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/d7/f774e49496194e90694e270df065bf823bb78eba8bc06d059a0eecbb1180/pyiceberg-0.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:76581d226ae67d8be5210bdab60dcdd8fc3a4d6745192a2b446eb746201abdb3", size = 624863, upload_time = "2025-03-04T15:17:39.009Z" }, ] +[package.optional-dependencies] +sql-sqlite = [ + { name = "sqlalchemy" }, +] + [[package]] name = "pyparsing" version = "3.2.3" @@ -994,6 +1061,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload_time = "2021-05-16T22:03:41.177Z" }, ] +[[package]] +name = "sqlalchemy" +version = "2.0.40" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/c3/3f2bfa5e4dcd9938405fe2fab5b6ab94a9248a4f9536ea2fd497da20525f/sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", size = 9664299, upload_time = "2025-03-27T17:52:31.876Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fa/8e8fd93684b04e65816be864bebf0000fe1602e5452d006f9acc5db14ce5/sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", size = 2112843, upload_time = "2025-03-27T18:49:25.515Z" }, + { url = "https://files.pythonhosted.org/packages/ba/87/06992f78a9ce545dfd1fea3dd99262bec5221f6f9d2d2066c3e94662529f/sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", size = 2104032, upload_time = "2025-03-27T18:49:28.098Z" }, + { url = "https://files.pythonhosted.org/packages/92/ee/57dc77282e8be22d686bd4681825299aa1069bbe090564868ea270ed5214/sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", size = 3086406, upload_time = "2025-03-27T18:44:25.302Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/3f/ceb9ab214b2e42d2e74a9209b3a2f2f073504eee16cddd2df81feeb67c2f/sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", size = 3094652, upload_time = "2025-03-27T18:55:16.174Z" }, + { url = "https://files.pythonhosted.org/packages/00/0a/3401232a5b6d91a2df16c1dc39c6504c54575744c2faafa1e5a50de96621/sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", size = 3050503, upload_time = "2025-03-27T18:44:28.266Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/ea7171415ab131397f71a2673645c2fe29ebe9a93063d458eb89e42bf051/sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", size = 3076011, upload_time = "2025-03-27T18:55:17.967Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ee/d8e229280d621bed8c51eebf1dd413aa09ca89e309b1fff40d881dd149af/sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", size = 2085136, upload_time = "2025-03-27T18:48:53.032Z" }, + { url = "https://files.pythonhosted.org/packages/60/7f/ea1086136bc648cd4713a1e01869f7fc31979d67b3a8f973f5d9ab8de7e1/sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", size = 2109421, upload_time = "2025-03-27T18:48:54.258Z" }, + { url = "https://files.pythonhosted.org/packages/77/7e/55044a9ec48c3249bb38d5faae93f09579c35e862bb318ebd1ed7a1994a5/sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", size = 2114025, upload_time = "2025-03-27T18:49:29.456Z" }, + { url = "https://files.pythonhosted.org/packages/77/0f/dcf7bba95f847aec72f638750747b12d37914f71c8cc7c133cf326ab945c/sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", size = 2104419, upload_time = "2025-03-27T18:49:30.75Z" }, + { url = "https://files.pythonhosted.org/packages/75/70/c86a5c20715e4fe903dde4c2fd44fc7e7a0d5fb52c1b954d98526f65a3ea/sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", size = 3222720, upload_time = "2025-03-27T18:44:29.871Z" }, + { url = "https://files.pythonhosted.org/packages/12/cf/b891a8c1d0c27ce9163361664c2128c7a57de3f35000ea5202eb3a2917b7/sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", size = 3222682, upload_time = "2025-03-27T18:55:20.097Z" }, + { url = "https://files.pythonhosted.org/packages/15/3f/7709d8c8266953d945435a96b7f425ae4172a336963756b58e996fbef7f3/sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", size = 3159542, upload_time = "2025-03-27T18:44:31.333Z" }, + { url = "https://files.pythonhosted.org/packages/85/7e/717eaabaf0f80a0132dc2032ea8f745b7a0914451c984821a7c8737fb75a/sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", size = 3179864, upload_time = "2025-03-27T18:55:21.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/cc/03eb5dfcdb575cbecd2bd82487b9848f250a4b6ecfb4707e834b4ce4ec07/sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", size = 2084675, upload_time = "2025-03-27T18:48:55.915Z" }, + { url = "https://files.pythonhosted.org/packages/9a/48/440946bf9dc4dc231f4f31ef0d316f7135bf41d4b86aaba0c0655150d370/sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", size = 2110099, upload_time = "2025-03-27T18:48:57.45Z" }, + { url = "https://files.pythonhosted.org/packages/92/06/552c1f92e880b57d8b92ce6619bd569b25cead492389b1d84904b55989d8/sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", size = 2112620, upload_time = "2025-03-27T18:40:00.071Z" }, + { url = "https://files.pythonhosted.org/packages/01/72/a5bc6e76c34cebc071f758161dbe1453de8815ae6e662393910d3be6d70d/sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", size = 2103004, upload_time = "2025-03-27T18:40:04.204Z" }, + { url = "https://files.pythonhosted.org/packages/bf/fd/0e96c8e6767618ed1a06e4d7a167fe13734c2f8113c4cb704443e6783038/sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", size = 3252440, upload_time = "2025-03-27T18:51:25.624Z" }, + { url = "https://files.pythonhosted.org/packages/cd/6a/eb82e45b15a64266a2917a6833b51a334ea3c1991728fd905bfccbf5cf63/sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", size = 3263277, upload_time = "2025-03-27T18:50:28.142Z" }, + { url = "https://files.pythonhosted.org/packages/45/97/ebe41ab4530f50af99e3995ebd4e0204bf1b0dc0930f32250dde19c389fe/sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", size = 3198591, upload_time = "2025-03-27T18:51:27.543Z" }, + { url = "https://files.pythonhosted.org/packages/e6/1c/a569c1b2b2f5ac20ba6846a1321a2bf52e9a4061001f282bf1c5528dcd69/sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", size = 3225199, upload_time = "2025-03-27T18:50:30.069Z" }, + { url = "https://files.pythonhosted.org/packages/8f/91/87cc71a6b10065ca0209d19a4bb575378abda6085e72fa0b61ffb2201b84/sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", size = 2082959, upload_time = "2025-03-27T18:45:57.574Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/14c511cda174aa1ad9b0e42b64ff5a71db35d08b0d80dc044dae958921e5/sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", size = 2108526, upload_time = "2025-03-27T18:45:58.965Z" }, + { url = "https://files.pythonhosted.org/packages/8c/18/4e3a86cc0232377bc48c373a9ba6a1b3fb79ba32dbb4eda0b357f5a2c59d/sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", size = 2107887, upload_time = "2025-03-27T18:40:05.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/60/9fa692b1d2ffc4cbd5f47753731fd332afed30137115d862d6e9a1e962c7/sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", size = 2098367, upload_time = "2025-03-27T18:40:07.182Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9f/84b78357ca641714a439eb3fbbddb17297dacfa05d951dbf24f28d7b5c08/sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", size = 3184806, upload_time = "2025-03-27T18:51:29.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/7d/e06164161b6bfce04c01bfa01518a20cccbd4100d5c951e5a7422189191a/sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", size = 3198131, upload_time = "2025-03-27T18:50:31.616Z" }, + { url = "https://files.pythonhosted.org/packages/6d/51/354af20da42d7ec7b5c9de99edafbb7663a1d75686d1999ceb2c15811302/sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", size = 3131364, upload_time = "2025-03-27T18:51:31.336Z" }, + { url = "https://files.pythonhosted.org/packages/7a/2f/48a41ff4e6e10549d83fcc551ab85c268bde7c03cf77afb36303c6594d11/sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", size = 3159482, upload_time = "2025-03-27T18:50:33.201Z" }, + { url = "https://files.pythonhosted.org/packages/33/ac/e5e0a807163652a35be878c0ad5cfd8b1d29605edcadfb5df3c512cdf9f3/sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", size = 2080704, upload_time = "2025-03-27T18:46:00.193Z" }, + { url = "https://files.pythonhosted.org/packages/1c/cb/f38c61f7f2fd4d10494c1c135ff6a6ddb63508d0b47bccccd93670637309/sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", size = 2104564, upload_time = "2025-03-27T18:46:01.442Z" }, + { url = "https://files.pythonhosted.org/packages/d1/8d/fb1f43d001ed9f8e48e4fb231199fde7f182741efd315d9aef241c3c2292/sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", size = 2115715, upload_time = "2025-03-27T18:49:23.956Z" }, + { url = "https://files.pythonhosted.org/packages/16/a6/a25d35a13368424b7623a37a3943620e9c3c1670aab4fd039cdaf84deb79/sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", size = 2106945, upload_time = "2025-03-27T18:49:25.376Z" }, + { url = "https://files.pythonhosted.org/packages/f2/91/171e9f94e66419bf9ec94cb1a52346b023c227ca9b6c4b4d767b252ac7b2/sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", size = 3100866, upload_time = "2025-03-27T18:10:48.796Z" }, + { url = "https://files.pythonhosted.org/packages/fa/56/a3fc75088c9f57a405bb890b8e00686a394bd0419e68758fbffd14649a3e/sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", size = 3108645, upload_time = "2025-03-27T18:55:40.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/18/fb198acaa8041dd5b61a521678bcef80c2d1fa90c8eaebe35004f12a3fba/sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", size = 3067694, upload_time = "2025-03-27T18:10:50.135Z" }, + { url = "https://files.pythonhosted.org/packages/aa/39/832b5fe338c98b8c0d6c987128e341ac74ce2e5298e9e019433b37cb6b19/sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", size = 3094193, upload_time = "2025-03-27T18:55:42.682Z" }, + { url = "https://files.pythonhosted.org/packages/3e/57/b3684de3e179e6429d71f31efb55183b274f3ffc1bee8cfda138b2b34927/sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", size = 2087537, upload_time = "2025-03-27T18:53:32.186Z" }, + { url = "https://files.pythonhosted.org/packages/05/dc/6af9d62239c1115c95a53477092bc4578f0f809962da1680ad75976a8672/sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", size = 2111906, upload_time = "2025-03-27T18:53:33.647Z" }, + { url = "https://files.pythonhosted.org/packages/d1/7c/5fc8e802e7506fe8b55a03a2e1dab156eae205c91bee46305755e086d2e2/sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", size = 1903894, upload_time = "2025-03-27T18:40:43.796Z" }, +] + [[package]] name = "strictyaml" version = "1.7.3" @@ -1076,33 +1196,41 @@ all = [ { name = "pyarrow" }, { name = "pyiceberg" }, ] -dev = [ - { name = "openapi-python-client" }, - { name = "pytest" }, - { name = "pytest-httpx" }, -] iceberg = [ { name = "polars" }, { name = "pyarrow" }, { name = "pyiceberg" }, ] +[package.dev-dependencies] +dev = [ + { name = "openapi-python-client" }, + { name = "pyiceberg", extra = ["sql-sqlite"] }, + { name = "pytest" }, + { name = "pytest-httpx" }, +] + [package.metadata] requires-dist = [ { name = "attrs", specifier = ">=24.2.0" }, { name = "httpx", specifier = ">=0.23.3" }, { name = "huggingface-hub", marker = "extra == 'ai'", specifier = ">=0.30.2" }, { name = "ollama", marker = "extra == 'ai'", specifier = ">=0.4.7" }, - { name = "openapi-python-client", marker = "extra == 'dev'", specifier = ">=0.12.1" }, { name = "polars", marker = "extra == 'iceberg'", specifier = ">=1.27.1" }, { name = "pyarrow", marker = "extra == 'iceberg'", specifier = ">=19.0.1" }, { name = "pyiceberg", marker = "extra == 'iceberg'", specifier = ">=0.9.0" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.5" }, - { name = "pytest-httpx", marker = "extra == 'dev'", specifier = ">=0.35.0" }, { name = "python-dateutil", specifier = ">=2.9.0.post0" }, { name = "tower", extras = ["ai", "iceberg"], marker = "extra == 'all'", editable = "." 
}, ] -provides-extras = ["ai", "iceberg", "all", "dev"] +provides-extras = ["ai", "iceberg", "all"] + +[package.metadata.requires-dev] +dev = [ + { name = "openapi-python-client", specifier = ">=0.12.1" }, + { name = "pyiceberg", extras = ["sql-sqlite"], specifier = ">=0.9.0" }, + { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-httpx", specifier = ">=0.35.0" }, +] [[package]] name = "tqdm" From 43bbf5f1814d64b9add62e97bcf5e6f36b4b9a4a Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 15:33:38 +0200 Subject: [PATCH 08/25] Update src/tower/_tables.py Updated commentary Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- src/tower/_tables.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tower/_tables.py b/src/tower/_tables.py index 70cfa30b..63d0294c 100644 --- a/src/tower/_tables.py +++ b/src/tower/_tables.py @@ -126,7 +126,7 @@ def delete(self, filters: Union[str, List[pc.Expression]]) -> TTable: TTable: The table with the deleted rows. """ if isinstance(filters, list): - # We need to covnert the pc.Expression into PyIceberg + # We need to convert the pc.Expression into PyIceberg next_filters = convert_pyarrow_expressions(filters) filters = next_filters From 71ef0d6c094157f939eb4a9d6d0b5fcdc1e3a162 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 15:34:46 +0200 Subject: [PATCH 09/25] chore: Add .python-version file --- .python-version | 1 + 1 file changed, 1 insertion(+) create mode 100644 .python-version diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..e4fba218 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12 From 47968369ccdd2f0236501fb44f0de7d9fcbe5c63 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 15:46:45 +0200 Subject: [PATCH 10/25] chore: Don't test our Python SDK on Windows --- .github/workflows/test-python.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index cd22685b..bf46a552 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest] + os: ubuntu-latest steps: - uses: actions/checkout@v4 From ef048b3c07789d44c7eb08ebfd7f226d599e328f Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 16:02:30 +0200 Subject: [PATCH 11/25] Version bump to 0.3.12 --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 2 +- pyproject.toml | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39684c9e..ca4de8d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.11" +version = "0.3.12" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.11" +version = "0.3.12" dependencies = [ "base64", "pem", @@ -2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.11" +version = "0.3.12" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.11" +version = "0.3.12" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.11" +version = "0.3.12" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" 
-version = "0.3.11" +version = "0.3.12" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.11" +version = "0.3.12" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.11" +version = "0.3.12" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index d65364ab..2d570ed9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.11" +version = "0.3.12" description = "Tower is the best way to host Python data apps in production" rust-version = "1.77" authors = ["Brad Heller "] diff --git a/pyproject.toml b/pyproject.toml index 7320a5d0..d4811ae6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.11" +version = "0.3.12" description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" From 4ffce2138c5f6ba45dff3da4dd63085a5cc1fe22 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 16:55:49 +0200 Subject: [PATCH 12/25] chore: Fix workflow file --- .github/workflows/test-python.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index bf46a552..0efaac54 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -23,7 +23,8 @@ jobs: strategy: fail-fast: false matrix: - os: ubuntu-latest + os: + - ubuntu-latest steps: - uses: actions/checkout@v4 From 5a5ebd77a0079af7397445f2470f838821e22cad Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Tue, 6 May 2025 16:58:59 +0200 Subject: [PATCH 13/25] chore: Fix uv.lock --- uv.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uv.lock b/uv.lock index 49909738..d6b5fe8a 100644 --- a/uv.lock +++ b/uv.lock @@ -1176,7 +1176,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.11" +version = "0.3.12" source = { editable = "." 
} dependencies = [ { name = "attrs" }, From 5b07df89570f0ad02cb46dd10e3fcc0d68a7d350 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:01:23 +0200 Subject: [PATCH 14/25] chore: Updated semver script --- scripts/semver.py | 178 ++++++++++++++++++++++++++++++++++++++++++++++ version.txt | 1 + 2 files changed, 179 insertions(+) create mode 100755 scripts/semver.py create mode 100644 version.txt diff --git a/scripts/semver.py b/scripts/semver.py new file mode 100755 index 00000000..6e85fec7 --- /dev/null +++ b/scripts/semver.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +import os +import sys +import re +import argparse + +BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +SEMVER_EXP = re.compile("\d+\.\d+(\.\d+(-prerelease\.(\d+))?)?") + +class Version: + def __init__(self, version_str): + version_str = version_str.removeprefix("v") + + if SEMVER_EXP.fullmatch(version_str): + parts = version_str.split(".") + + self.major = int(parts[0]) + self.minor = int(parts[1]) + + if len(parts) > 2: + self.patch = int(parts[2]) + + if len(parts) > 3: + self.prerelease = int(parts[3].split(".")[1]) + else: + self.prerelease = 0 + else: + self.patch = 0 + self.prerelease = 0 + else: + self.major = 0 + self.minor = 0 + self.patch = 0 + self.prerelease = 0 + + def is_valid(self): + return self.minor >= 0 + + def __eq__(self, other): + if isinstance(other, Version): + return self.major == other.major and self.minor == other.minor and self.patch == other.patch + else: + return False + + def to_tag_string(self): + if self.prerelease > 0: + return "{major}.{minor}.{patch}-prerelease.{prerelease}".format(major=self.major, minor=self.minor, patch=self.patch, prerelease=self.prerelease) + else: + return "{major}.{minor}.{patch}".format(major=self.major, minor=self.minor, patch=self.patch) + + def to_python_string(self): + if self.prerelease > 0: + return "{major}.{minor}.{patch}rc{prerelease}".format(major=self.major, minor=self.minor, patch=self.patch, prerelease=self.prerelease) + else: + return "{major}.{minor}.{patch}".format(major=self.major, minor=self.minor, patch=self.patch) + +def get_all_versions(): + # Wait for this to complete. + proc = os.popen("git fetch --tags") + + # we read from this to have it complete before we proceed. + _ = proc.read() + + stream = os.popen("git --no-pager tag") + tags = stream.read().split("\n") + return [Version(tag) for tag in tags] + +def get_version_set(version): + all_versions = get_all_versions() + return [v for v in all_versions if v.major == version.major and v.minor == version.minor] + +def get_version_patch(version): + return version.patch + +def get_current_version(base): + v = Version(base) + versions = get_version_set(v) + + if len(versions) < 1: + return None + else: + current_version = max(versions, key=get_version_patch) + + # find all the versions that are the same major, minor, and patch + same_versions = [v for v in versions if v == current_version] + + # Now if there are prereleases, we want the one with a max prerelease number + if len(same_versions) > 1: + released_versions = [ver for ver in same_versions if ver.prerelease == 0] + + # If there is a released version, then let's go with this. 
+ if len(release_versions) > 1: + return release_versions[0] + else: + return max(same_versions, key=lambda x: x.prerelease) + else: + return same_versions[0] + +def get_version_base(): + path = os.path.join(BASE_PATH, "version.txt") + + with open(path) as file: + line = file.readline().rstrip() + return line + +def str2bool(value): + if isinstance(value, bool): + return value + if value.lower() in {'true', 'yes', '1'}: + return True + elif value.lower() in {'false', 'no', '0'}: + return False + else: + raise argparse.ArgumentTypeError('Boolean value expected (true/false).') + +def replace_line_with_regex(file_path, pattern, replace_text): + """ + Replace lines matching a regex pattern with replace_text in the specified file. + + Args: + file_path (str): Path to the file to modify + pattern (re.Pattern): Regex pattern to match lines + replace_text (str): Text to replace the entire line with + """ + with open(file_path, 'r') as file: + content = file.read() + + # Use regex to replace lines matching the pattern + new_content = pattern.sub(replace_text + '\n', content) + + with open(file_path, 'w') as file: + file.write(new_content) + + print(f"Regex replacement complete in {file_path}") + +def update_cargo_file(version): + replace_line_with_regex("Cargo.toml", re.compile(r'^\s*version\s*=\s*".*"$'), f'version = "{version.to_tag_string()}"') + +def update_pyproject_file(version): + replace_line_with_regex("pyproject.toml", re.compile(r'^\s*version\s*=\s*".*"$'), f'version = "{version.to_python_string()}"') + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + prog='semver', + description='Manages the semantic versioning of the projects', + epilog='This is the epilog' + ) + + parser.add_argument("-i", "--patch", type=str2bool, required=False, default=False, help="Increment the patch version") + parser.add_argument("-p", "--prerelease", type=str2bool, required=False, default=False, help="Include the fact that this is a prerelease version") + parser.add_argument("-w", "--write", type=str2bool, required=False, default=False, help="Update the various tools in this repository") + args = parser.parse_args() + + version_base = get_version_base() + + if args.patch: + version = get_current_version(version_base) + + if version is None: + version = Version(version_base) + else: + version.patch += 1 + + else: + version = get_current_version(version_base) + + if args.prerelease: + version.prerelease += 1 + + if args.write: + update_cargo_file(version) + update_pyproject_file(version) + + # Do a cargo build to update the lock file + os.system("cargo build") + else: + print(version.to_tag_string(), end='', flush=True) diff --git a/version.txt b/version.txt new file mode 100644 index 00000000..be586341 --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +0.3 From c7aa1e2391e31b9e1cb5fcdaaa639268f553e720 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:04:41 +0200 Subject: [PATCH 15/25] chore: Downgrade to release candidate --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 3 ++- pyproject.toml | 3 ++- scripts/semver.py | 7 +++++-- 4 files changed, 17 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ca4de8d3..9688a2ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "base64", "pem", @@ 
-2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.12" +version = "0.3.12-prerelease.1" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 2d570ed9..1d56814d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,8 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.12" +version = "0.3.12-prerelease.1" + description = "Tower is the best way to host Python data apps in production" rust-version = "1.77" authors = ["Brad Heller "] diff --git a/pyproject.toml b/pyproject.toml index d4811ae6..ed29dd5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,8 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.12" +version = "0.3.12rc1" + description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" diff --git a/scripts/semver.py b/scripts/semver.py index 6e85fec7..6e661eb3 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -128,6 +128,7 @@ def replace_line_with_regex(file_path, pattern, replace_text): # Use regex to replace lines matching the pattern new_content = pattern.sub(replace_text + '\n', content) + print(new_content) with open(file_path, 'w') as file: file.write(new_content) @@ -135,10 +136,12 @@ def replace_line_with_regex(file_path, pattern, replace_text): print(f"Regex replacement complete in {file_path}") def update_cargo_file(version): - replace_line_with_regex("Cargo.toml", re.compile(r'^\s*version\s*=\s*".*"$'), f'version = "{version.to_tag_string()}"') + pattern = re.compile(r'^\s*version\s*=\s*".*"$', re.MULTILINE) + replace_line_with_regex("Cargo.toml", pattern, f'version = "{version.to_tag_string()}"') def update_pyproject_file(version): - replace_line_with_regex("pyproject.toml", re.compile(r'^\s*version\s*=\s*".*"$'), f'version = "{version.to_python_string()}"') + pattern = re.compile(r'^\s*version\s*=\s*".*"$', re.MULTILINE) + replace_line_with_regex("pyproject.toml", pattern, f'version = "{version.to_python_string()}"') if __name__ == "__main__": parser = argparse.ArgumentParser( From 3b033756963487944aa8b7227d7721178da32881 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:08:35 +0200 Subject: [PATCH 16/25] chore: Updates to better support versioning --- scripts/semver.py | 11 ++++++++--- uv.lock | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/scripts/semver.py b/scripts/semver.py index 6e661eb3..84f8ff35 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -6,7 +6,7 @@ BASE_PATH = 
os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -SEMVER_EXP = re.compile("\d+\.\d+(\.\d+(-prerelease\.(\d+))?)?") +SEMVER_EXP = re.compile("\d+\.\d+(\.\d+)?(-prerelease\.(\d+))?") class Version: def __init__(self, version_str): @@ -19,10 +19,14 @@ def __init__(self, version_str): self.minor = int(parts[1]) if len(parts) > 2: - self.patch = int(parts[2]) + if "-prerelease" in parts[2]: + prerelease_parts = parts[2].split("-prerelease") + self.patch = int(prerelease_parts[0]) + else: + self.patch = int(parts[2]) if len(parts) > 3: - self.prerelease = int(parts[3].split(".")[1]) + self.prerelease = int(parts[3]) else: self.prerelease = 0 else: @@ -177,5 +181,6 @@ def update_pyproject_file(version): # Do a cargo build to update the lock file os.system("cargo build") + os.system("uv lock") else: print(version.to_tag_string(), end='', flush=True) diff --git a/uv.lock b/uv.lock index d6b5fe8a..f92e42b3 100644 --- a/uv.lock +++ b/uv.lock @@ -1176,7 +1176,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.12" +version = "0.3.12rc1" source = { editable = "." } dependencies = [ { name = "attrs" }, From a7d4d9946ec977fa3c6660020f66d40a7143f64d Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:21:33 +0200 Subject: [PATCH 17/25] chore: Try different -rc specifier --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 4 +++- pyproject.toml | 2 ++ scripts/semver.py | 8 ++++---- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9688a2ee..10fd60a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "base64", "pem", @@ -2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 1d56814d..743fafe9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,9 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.12-prerelease.1" +version = "0.3.12-rc.1" + + description = "Tower is the best way to host Python data apps in production" rust-version = "1.77" diff --git a/pyproject.toml b/pyproject.toml index ed29dd5a..0dd5e2cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,8 @@ build-backend = "maturin" name = "tower" version = "0.3.12rc1" + + 
description = "Tower CLI and runtime environment for Tower." authors = [{ name = "Tower Computing Inc.", email = "brad@tower.dev" }] readme = "README.md" diff --git a/scripts/semver.py b/scripts/semver.py index 84f8ff35..fb6b281a 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -6,7 +6,7 @@ BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -SEMVER_EXP = re.compile("\d+\.\d+(\.\d+)?(-prerelease\.(\d+))?") +SEMVER_EXP = re.compile("\d+\.\d+(\.\d+)?(-rc\.(\d+))?") class Version: def __init__(self, version_str): @@ -19,8 +19,8 @@ def __init__(self, version_str): self.minor = int(parts[1]) if len(parts) > 2: - if "-prerelease" in parts[2]: - prerelease_parts = parts[2].split("-prerelease") + if "-rc" in parts[2]: + prerelease_parts = parts[2].split("-rc") self.patch = int(prerelease_parts[0]) else: self.patch = int(parts[2]) @@ -49,7 +49,7 @@ def __eq__(self, other): def to_tag_string(self): if self.prerelease > 0: - return "{major}.{minor}.{patch}-prerelease.{prerelease}".format(major=self.major, minor=self.minor, patch=self.patch, prerelease=self.prerelease) + return "{major}.{minor}.{patch}-rc.{prerelease}".format(major=self.major, minor=self.minor, patch=self.patch, prerelease=self.prerelease) else: return "{major}.{minor}.{patch}".format(major=self.major, minor=self.minor, patch=self.patch) From a0b9b2862155a2cf0a20b6312012df590da896cf Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:33:59 +0200 Subject: [PATCH 18/25] chore: Add a tool for removing prerelease designation --- scripts/semver.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/semver.py b/scripts/semver.py index fb6b281a..0de6b1cf 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -156,6 +156,7 @@ def update_pyproject_file(version): parser.add_argument("-i", "--patch", type=str2bool, required=False, default=False, help="Increment the patch version") parser.add_argument("-p", "--prerelease", type=str2bool, required=False, default=False, help="Include the fact that this is a prerelease version") + parser.add_argument("-p", "--release", type=str2bool, required=False, default=False, help="Remove the perelease designation") parser.add_argument("-w", "--write", type=str2bool, required=False, default=False, help="Update the various tools in this repository") args = parser.parse_args() @@ -172,7 +173,9 @@ def update_pyproject_file(version): else: version = get_current_version(version_base) - if args.prerelease: + if args.release: + version.prerelease = 0 + elif args.prerelease: version.prerelease += 1 if args.write: From 1d0d619a2fe05985b0aae66d6fb8505ce817274b Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:48:35 +0200 Subject: [PATCH 19/25] chore: Make sure we publish prerelease versions --- dist-workspace.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dist-workspace.toml b/dist-workspace.toml index 552c59f2..6d9575dd 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -25,6 +25,8 @@ create-release = true pr-run-mode = "skip" # Local artifacts jobs to run in CI local-artifacts-jobs = ["./build-binaries"] +# Make sure to publish prerelease versions +publish-prereleases = true [dist.github-custom-runners] global = "ubuntu-22.04" From 3437d51aeb9a9998d599dbfacba41f66d18f1f91 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Wed, 7 May 2025 15:49:27 +0200 Subject: [PATCH 20/25] chore: Bump version to v0.3.12-rc.2 --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 3 ++- pyproject.toml | 3 ++- scripts/semver.py | 5 
+---- uv.lock | 2 +- 5 files changed, 14 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 10fd60a8..ed12b897 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "base64", "pem", @@ -2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 743fafe9..a13ff187 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,8 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.12-rc.1" +version = "0.3.12-rc.2" + diff --git a/pyproject.toml b/pyproject.toml index 0dd5e2cb..5f597d41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,8 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.12rc1" +version = "0.3.12rc2" + diff --git a/scripts/semver.py b/scripts/semver.py index 0de6b1cf..e54cacc8 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -132,12 +132,9 @@ def replace_line_with_regex(file_path, pattern, replace_text): # Use regex to replace lines matching the pattern new_content = pattern.sub(replace_text + '\n', content) - print(new_content) with open(file_path, 'w') as file: file.write(new_content) - - print(f"Regex replacement complete in {file_path}") def update_cargo_file(version): pattern = re.compile(r'^\s*version\s*=\s*".*"$', re.MULTILINE) @@ -156,7 +153,7 @@ def update_pyproject_file(version): parser.add_argument("-i", "--patch", type=str2bool, required=False, default=False, help="Increment the patch version") parser.add_argument("-p", "--prerelease", type=str2bool, required=False, default=False, help="Include the fact that this is a prerelease version") - parser.add_argument("-p", "--release", type=str2bool, required=False, default=False, help="Remove the perelease designation") + parser.add_argument("-r", "--release", type=str2bool, required=False, default=False, help="Remove the perelease designation") parser.add_argument("-w", "--write", type=str2bool, required=False, default=False, help="Update the various tools in this repository") args = parser.parse_args() diff --git a/uv.lock b/uv.lock index f92e42b3..453ef9d3 100644 --- a/uv.lock +++ b/uv.lock @@ -1176,7 +1176,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.12rc1" +version = "0.3.12rc2" source = { editable = "." 
} dependencies = [ { name = "attrs" }, From 254cd037ecc6202eb7e913f91bf0884cadbd923a Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 8 May 2025 09:48:27 +0200 Subject: [PATCH 21/25] chore: Update `cargo dist` to allow releasing prereleases --- .github/workflows/release.yml | 10 ++++++++-- dist-workspace.toml | 4 ++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6fe96573..b242161e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,6 +1,7 @@ -# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ +# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist # # Copyright 2022-2024, axodotdev +# Copyright 2025 Astral Software Inc. # SPDX-License-Identifier: MIT or Apache-2.0 # # CI that: @@ -57,12 +58,13 @@ jobs: steps: - uses: actions/checkout@v4 with: + persist-credentials: false submodules: recursive - name: Install dist # we specify bash to get pipefail; it guards against the `curl` command # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5/cargo-dist-installer.sh | sh" - name: Cache dist uses: actions/upload-artifact@v4 with: @@ -116,6 +118,7 @@ jobs: git config --global core.longpaths true - uses: actions/checkout@v4 with: + persist-credentials: false submodules: recursive - name: Install Rust non-interactively if not already installed if: ${{ matrix.container }} @@ -184,6 +187,7 @@ jobs: steps: - uses: actions/checkout@v4 with: + persist-credentials: false submodules: recursive - name: Install cached dist uses: actions/download-artifact@v4 @@ -234,6 +238,7 @@ jobs: steps: - uses: actions/checkout@v4 with: + persist-credentials: false submodules: recursive - name: Install cached dist uses: actions/download-artifact@v4 @@ -313,4 +318,5 @@ jobs: steps: - uses: actions/checkout@v4 with: + persist-credentials: false submodules: recursive diff --git a/dist-workspace.toml b/dist-workspace.toml index 6d9575dd..f6498c4e 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -4,7 +4,7 @@ members = ["cargo:."] # Config for 'dist' [dist] # The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.28.0" +cargo-dist-version = "0.28.5" # CI backends to support ci = "github" # The installers to generate for each app @@ -25,7 +25,7 @@ create-release = true pr-run-mode = "skip" # Local artifacts jobs to run in CI local-artifacts-jobs = ["./build-binaries"] -# Make sure to publish prerelease versions +# Whether to publish prereleases to package managers publish-prereleases = true [dist.github-custom-runners] From a10372da8fa9d6ee7eaa715087eadc4742464cce Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 8 May 2025 09:49:46 +0200 Subject: [PATCH 22/25] chore: Semver bug --- scripts/semver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/semver.py b/scripts/semver.py index e54cacc8..d708d956 100755 --- a/scripts/semver.py +++ b/scripts/semver.py @@ -94,8 +94,8 @@ def get_current_version(base): released_versions = [ver for ver in same_versions if ver.prerelease == 0] # If there is a released version, then let's go with this. 
- if len(release_versions) > 1: - return release_versions[0] + if len(released_versions) > 1: + return released_versions[0] else: return max(same_versions, key=lambda x: x.prerelease) else: From 2153a3fe9c157409d84470528378a161dad28764 Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 8 May 2025 10:38:48 +0200 Subject: [PATCH 23/25] chore: Bump to version v0.3.12-rc.3 --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 6 +----- dist-workspace.toml | 2 +- pyproject.toml | 3 ++- uv.lock | 2 +- 5 files changed, 13 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed12b897..ea020e2a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "base64", "pem", @@ -2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.12-rc.2" +version = "0.3.12-rc.3" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index a13ff187..10c20c68 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,11 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.12-rc.2" - - - - +version = "0.3.12-rc.3" description = "Tower is the best way to host Python data apps in production" rust-version = "1.77" authors = ["Brad Heller "] diff --git a/dist-workspace.toml b/dist-workspace.toml index f6498c4e..46c93efe 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -12,7 +12,7 @@ installers = ["shell", "homebrew", "msi"] # A GitHub repo to push Homebrew formulas to tap = "tower/tower-cli" # Target platforms to build apps for (Rust target-triple syntax) -targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] +targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "aarch64-pc-windows-msvc", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl", "x86_64-pc-windows-msvc"] # Path that installers should place binaries in install-path = "CARGO_HOME" # Publish jobs to run in CI diff --git a/pyproject.toml b/pyproject.toml index 5f597d41..c090d571 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,8 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.12rc2" +version = "0.3.12rc3" + diff --git a/uv.lock b/uv.lock index 453ef9d3..1e3dd940 100644 --- a/uv.lock +++ b/uv.lock @@ -1176,7 +1176,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.12rc2" +version = 
"0.3.12rc3" source = { editable = "." } dependencies = [ { name = "attrs" }, From 3bd732381ed673e3e57f78b2e26c9e30cf56d92d Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 8 May 2025 10:55:20 +0200 Subject: [PATCH 24/25] chore: Remove Windows targets for now --- dist-workspace.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dist-workspace.toml b/dist-workspace.toml index 46c93efe..48b312c9 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -12,7 +12,7 @@ installers = ["shell", "homebrew", "msi"] # A GitHub repo to push Homebrew formulas to tap = "tower/tower-cli" # Target platforms to build apps for (Rust target-triple syntax) -targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "aarch64-pc-windows-msvc", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl", "x86_64-pc-windows-msvc"] +targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-unknown-linux-musl"] # Path that installers should place binaries in install-path = "CARGO_HOME" # Publish jobs to run in CI From d0c30a6a86d760402aa8e35f1a8082a0a63a7e2f Mon Sep 17 00:00:00 2001 From: Brad Heller Date: Thu, 8 May 2025 11:18:08 +0200 Subject: [PATCH 25/25] chore: Bump version to v0.3.12 --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 3 ++- pyproject.toml | 3 ++- uv.lock | 2 +- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ea020e2a..ca4de8d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ [[package]] name = "config" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "chrono", "clap", @@ -382,7 +382,7 @@ dependencies = [ [[package]] name = "crypto" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "base64", "pem", @@ -2425,7 +2425,7 @@ dependencies = [ [[package]] name = "testutils" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "pem", "rsa", @@ -2629,7 +2629,7 @@ dependencies = [ [[package]] name = "tower" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "tokio", "tower-api", @@ -2666,7 +2666,7 @@ dependencies = [ [[package]] name = "tower-cmd" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "anyhow", "bytes", @@ -2705,7 +2705,7 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-package" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "async-compression", "config", @@ -2724,7 +2724,7 @@ dependencies = [ [[package]] name = "tower-runtime" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "chrono", "log", @@ -2741,7 +2741,7 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-version" -version = "0.3.12-rc.3" +version = "0.3.12" dependencies = [ "anyhow", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 10c20c68..1f635348 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,8 @@ resolver = "2" [workspace.package] edition = "2021" -version = "0.3.12-rc.3" +version = "0.3.12" + description = "Tower is the best way to host Python data apps in production" rust-version = "1.77" authors = ["Brad Heller "] diff --git a/pyproject.toml b/pyproject.toml index c090d571..3fcad844 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,8 @@ build-backend = "maturin" [project] name = "tower" -version = "0.3.12rc3" +version = "0.3.12" + diff --git a/uv.lock b/uv.lock index 1e3dd940..d6b5fe8a 100644 --- a/uv.lock +++ b/uv.lock @@ 
-1176,7 +1176,7 @@ wheels = [ [[package]] name = "tower" -version = "0.3.12rc3" +version = "0.3.12" source = { editable = "." } dependencies = [ { name = "attrs" },