@@ -1,8 +1,7 @@
from datetime import date, datetime
from datetime import date
from typing import Any, Dict, List, Literal, Optional, Union, cast
from uuid import uuid1

import pandas as pd
from forestadmin.agent_toolkit.forest_logger import ForestLogger
from forestadmin.agent_toolkit.resources.collections.base_collection_resource import BaseCollectionResource
from forestadmin.agent_toolkit.resources.collections.decorators import (
@@ -16,20 +15,21 @@
from forestadmin.agent_toolkit.resources.context_variable_injector_mixin import ContextVariableInjectorResourceMixin
from forestadmin.agent_toolkit.utils.context import FileResponse, HttpResponseBuilder, Request, RequestMethod, Response
from forestadmin.datasource_toolkit.exceptions import ForbiddenError, ForestException
from forestadmin.datasource_toolkit.interfaces.query.aggregation import Aggregation
from forestadmin.datasource_toolkit.interfaces.query.aggregation import Aggregation, DateOperation
from forestadmin.datasource_toolkit.interfaces.query.condition_tree.nodes.base import ConditionTree
from forestadmin.datasource_toolkit.interfaces.query.condition_tree.nodes.branch import Aggregator, ConditionTreeBranch
from forestadmin.datasource_toolkit.interfaces.query.condition_tree.nodes.leaf import ConditionTreeLeaf
from forestadmin.datasource_toolkit.interfaces.query.filter.factory import FilterFactory
from forestadmin.datasource_toolkit.interfaces.query.filter.unpaginated import Filter
from forestadmin.datasource_toolkit.utils.date_utils import (
DATE_OPERATION_STR_FORMAT_FN,
make_formatted_date_range,
parse_date,
)
from forestadmin.datasource_toolkit.utils.schema import SchemaUtils


class StatsResource(BaseCollectionResource, ContextVariableInjectorResourceMixin):
FREQUENCIES = {"Day": "d", "Week": "W-MON", "Month": "BMS", "Year": "BYS"}

FORMAT = {"Day": "%d/%m/%Y", "Week": "W%V-%G", "Month": "%b %Y", "Year": "%Y"}

def stats_method(self, type: str):
return {
"Value": self.value,
@@ -135,12 +135,13 @@ async def line(self, request: RequestCollection) -> Response:
if key not in request.body:
raise ForestException(f"The parameter {key} is not defined")

date_operation = DateOperation(request.body["timeRange"])
current_filter = await self._get_filter(request)
aggregation = Aggregation(
{
"operation": request.body["aggregator"],
"field": request.body.get("aggregateFieldName"),
"groups": [{"field": request.body["groupByFieldName"], "operation": request.body["timeRange"]}],
"groups": [{"field": request.body["groupByFieldName"], "operation": date_operation}],
}
)
rows = await request.collection.aggregate(request.user, current_filter, aggregation)
@@ -149,34 +150,23 @@ async def line(self, request: RequestCollection) -> Response:
for row in rows:
label = row["group"][request.body["groupByFieldName"]]
if label is not None:
if isinstance(label, str):
label = datetime.fromisoformat(label).date()
elif isinstance(label, datetime):
label = label.date()
elif isinstance(label, date):
pass
else:
ForestLogger.log(
"warning",
f"The time chart label type must be 'str' or 'date', not {type(label)}. Skipping this record.",
)
label = parse_date(label)
dates.append(label)
values_label[label.strftime(self.FORMAT[request.body["timeRange"]])] = row["value"]
values_label[DATE_OPERATION_STR_FORMAT_FN[date_operation](label)] = row["value"]

dates.sort()
end = dates[-1]
start = dates[0]
data_points: List[Dict[str, Union[date, Dict[str, int]]]] = []
for dt in pd.date_range( # type: ignore
start=start, end=end, freq=self.FREQUENCIES[request.body["timeRange"]]
).to_pydatetime():
label = dt.strftime(self.FORMAT[request.body["timeRange"]])
data_points: List[Dict[str, Union[date, Dict[str, int], str]]] = []

for label in make_formatted_date_range(start, end, date_operation):
data_points.append(
{
"label": label,
"values": {"value": values_label.get(label, 0)},
}
)

return self._build_success_response(data_points)

@check_method(RequestMethod.POST)
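For orientation, the quarter labels that the Quarter line-chart tests further down expect ("Q1-2022", "Q2-2022", ...) follow a simple quarter/year pattern. The sketch below only illustrates that label format and a quarter-by-quarter fill between two dates; the helper names are hypothetical and this is not the toolkit's DATE_OPERATION_STR_FORMAT_FN / make_formatted_date_range implementation.

# Illustrative sketch only (hypothetical helpers, not the forestadmin date_utils API).
from datetime import date
from typing import Iterator


def quarter_label(d: date) -> str:
    # date(2022, 5, 10) -> "Q2-2022"
    return f"Q{(d.month - 1) // 3 + 1}-{d.year}"


def iter_quarter_labels(start: date, end: date) -> Iterator[str]:
    # Yield one label per quarter between start and end, inclusive.
    year, quarter = start.year, (start.month - 1) // 3 + 1
    while (year, quarter) <= (end.year, (end.month - 1) // 3 + 1):
        yield f"Q{quarter}-{year}"
        year, quarter = (year + 1, 1) if quarter == 4 else (year, quarter + 1)


# list(iter_quarter_labels(date(2022, 3, 31), date(2022, 12, 31)))
# -> ["Q1-2022", "Q2-2022", "Q3-2022", "Q4-2022"]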
@@ -119,7 +119,9 @@ def is_checkbox_group_field(
return field is not None and field.get("widget", "") == "CheckboxGroup"

@staticmethod
def is_dropdown_field(field: ActionField) -> TypeGuard[
def is_dropdown_field(
field: ActionField,
) -> TypeGuard[
Union[
DropdownDynamicSearchFieldConfiguration[str],
DropdownDynamicSearchFieldConfiguration[int],
@@ -129,7 +131,9 @@ def is_dropdown_field(field: ActionField) -> TypeGuard[
return field is not None and field.get("widget", "") == "Dropdown"

@staticmethod
def is_user_dropdown_field(field: ActionField) -> TypeGuard[
def is_user_dropdown_field(
field: ActionField,
) -> TypeGuard[
Union[
PlainStringListDynamicFieldUserDropdownFieldConfiguration,
PlainStringDynamicFieldUserDropdownFieldConfiguration,
@@ -4,7 +4,6 @@


class FrontendFilterableUtils:

@classmethod
def is_filterable(cls, operators: Set[Operator]) -> bool:
return operators is not None and len(operators) > 0
14 changes: 0 additions & 14 deletions src/agent_toolkit/pyproject.toml
@@ -23,21 +23,7 @@ pyjwt = "^2"
cachetools = "~=5.2"
sseclient-py = "^1.5"
forestadmin-datasource-toolkit = "1.22.11"
[[tool.poetry.dependencies.pandas]]
version = ">=1.4.0"
python = "<3.13.0"

[[tool.poetry.dependencies.pandas]]
version = ">=2.2.3"
python = ">=3.13.0"

[[tool.poetry.dependencies.numpy]]
python = ">=3.8.0,<3.12"
version = ">=1.24.0"

[[tool.poetry.dependencies.numpy]]
python = ">=3.13"
version = ">=1.3.0"

[tool.poetry.dependencies."backports.zoneinfo"]
version = "~0.2.1"
@@ -686,6 +686,54 @@ def test_line_should_return_chart_with_month_filter(self):
{"label": "Feb 2022", "values": {"value": 15}},
)

def test_line_should_return_chart_with_quarter_filter(self):
request = self.mk_request("Quarter")
with patch.object(
self.book_collection,
"aggregate",
new_callable=AsyncMock,
return_value=[
{"value": 10, "group": {"date": "2022-03-31 00:00:00"}},
{"value": 20, "group": {"date": "2022-06-30 00:00:00"}},
{"value": 30, "group": {"date": "2022-09-30 00:00:00"}},
{"value": 40, "group": {"date": "2022-12-31 00:00:00"}},
],
):
response = self.loop.run_until_complete(self.stat_resource.line(request))

content_body = json.loads(response.body)
self.assertEqual(response.status, 200)
self.assertEqual(content_body["data"]["type"], "stats")
self.assertEqual(len(content_body["data"]["attributes"]["value"]), 4)
self.assertEqual(content_body["data"]["attributes"]["value"][0], {"label": "Q1-2022", "values": {"value": 10}})
self.assertEqual(content_body["data"]["attributes"]["value"][1], {"label": "Q2-2022", "values": {"value": 20}})
self.assertEqual(content_body["data"]["attributes"]["value"][2], {"label": "Q3-2022", "values": {"value": 30}})
self.assertEqual(content_body["data"]["attributes"]["value"][3], {"label": "Q4-2022", "values": {"value": 40}})

def test_line_should_return_chart_with_quarter_filter_should_also_work_with_date_as_quarter_start(self):
request = self.mk_request("Quarter")
with patch.object(
self.book_collection,
"aggregate",
new_callable=AsyncMock,
return_value=[
{"value": 10, "group": {"date": "2022-01-01 00:00:00"}},
{"value": 20, "group": {"date": "2022-04-01 00:00:00"}},
{"value": 30, "group": {"date": "2022-07-01 00:00:00"}},
{"value": 40, "group": {"date": "2022-10-01 00:00:00"}},
],
):
response = self.loop.run_until_complete(self.stat_resource.line(request))

content_body = json.loads(response.body)
self.assertEqual(response.status, 200)
self.assertEqual(content_body["data"]["type"], "stats")
self.assertEqual(len(content_body["data"]["attributes"]["value"]), 4)
self.assertEqual(content_body["data"]["attributes"]["value"][0], {"label": "Q1-2022", "values": {"value": 10}})
self.assertEqual(content_body["data"]["attributes"]["value"][1], {"label": "Q2-2022", "values": {"value": 20}})
self.assertEqual(content_body["data"]["attributes"]["value"][2], {"label": "Q3-2022", "values": {"value": 30}})
self.assertEqual(content_body["data"]["attributes"]["value"][3], {"label": "Q4-2022", "values": {"value": 40}})

def test_line_should_return_chart_with_year_filter(self):
request = self.mk_request("Year")
with patch.object(
@@ -2,6 +2,7 @@
from datetime import date, datetime
from typing import Any, Dict, List, Optional, Set, Tuple

import pandas as pd
from django.db import models
from forestadmin.datasource_django.exception import DjangoDatasourceException
from forestadmin.datasource_django.interface import BaseDjangoCollection
@@ -311,6 +312,7 @@ class DjangoQueryGroupByHelper:
DateOperation.DAY: "__day",
DateOperation.WEEK: "__week",
DateOperation.MONTH: "__month",
DateOperation.QUARTER: "__quarter",
DateOperation.YEAR: "__year",
}

@@ -331,6 +333,11 @@ def get_operation_suffixes(cls, group: PlainAggregationGroup) -> List[str]:
cls.DATE_OPERATION_SUFFIX_MAPPING[DateOperation.YEAR],
cls.DATE_OPERATION_SUFFIX_MAPPING[DateOperation.WEEK],
]
if group["operation"] == DateOperation.QUARTER:
return [
cls.DATE_OPERATION_SUFFIX_MAPPING[DateOperation.YEAR],
cls.DATE_OPERATION_SUFFIX_MAPPING[DateOperation.QUARTER],
]
if group["operation"] == DateOperation.DAY:
return [
cls.DATE_OPERATION_SUFFIX_MAPPING[DateOperation.YEAR],
@@ -380,5 +387,11 @@ def _make_date_from_record(cls, row: AggregateResult, date_field: str, date_oper
row_date = datetime.strptime(str_year_week + "-1", "%Y-W%W-%w")
return row_date.date()

if date_operation == DateOperation.QUARTER:
end_of_quarter_date = (
pd.Timestamp(row[f"{date_field}__year"], (row[f"{date_field}__quarter"] * 3), 1) + pd.offsets.MonthEnd()
)
return end_of_quarter_date.date()

if date_operation == DateOperation.DAY:
return date(row[f"{date_field}__year"], row[f"{date_field}__month"], row[f"{date_field}__day"])
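As a standalone sanity check on the quarter branch added above: pd.Timestamp(year, quarter * 3, 1) is the first day of the quarter's last month, and adding pd.offsets.MonthEnd() rolls it forward to that month's last day, which is exactly what test_should_work_by_quarter further down asserts (2023-03-31 for Q1 2023, 2022-12-31 for Q4 2022). Minimal sketch, assuming only that pandas is installed (it is added to this package's dependencies below).

# Sanity check of the quarter-end arithmetic used in _make_date_from_record.
import pandas as pd

for year, quarter, expected in [(2023, 1, "2023-03-31"), (2022, 4, "2022-12-31")]:
    end_of_quarter = (pd.Timestamp(year, quarter * 3, 1) + pd.offsets.MonthEnd()).date()
    assert str(end_of_quarter) == expected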
7 changes: 7 additions & 0 deletions src/datasource_django/pyproject.toml
@@ -20,6 +20,13 @@ typing-extensions = "~=4.2"
django = ">=3.2,<5.2"
forestadmin-datasource-toolkit = "1.22.11"
forestadmin-agent-toolkit = "1.22.11"
[[tool.poetry.dependencies.pandas]]
version = ">=1.4.0"
python = "<3.13.0"

[[tool.poetry.dependencies.pandas]]
version = ">=2.2.3"
python = ">=3.13.0"

[tool.pytest.ini_options]
DJANGO_SETTINGS_MODULE = "test_project_datasource.settings"
15 changes: 15 additions & 0 deletions src/datasource_django/tests/test_django_collection.py
@@ -512,6 +512,21 @@ async def test_should_work_by_year(self):
],
)

async def test_should_work_by_quarter(self):
ret = await self.rating_collection.aggregate(
self.mocked_caller,
Filter({}),
Aggregation(
{
"operation": "Sum",
"field": "rating",
"groups": [{"field": "rated_at", "operation": DateOperation.QUARTER}],
}
),
)
self.assertIn({"value": 16, "group": {"rated_at": datetime.date(2023, 3, 31)}}, ret)
self.assertIn({"value": 1, "group": {"rated_at": datetime.date(2022, 12, 31)}}, ret)

async def test_should_work_by_month(self):
ret = await self.rating_collection.aggregate(
self.mocked_caller,
@@ -6,9 +6,9 @@
from forestadmin.datasource_toolkit.exceptions import DatasourceToolkitException
from forestadmin.datasource_toolkit.interfaces.query.aggregation import Aggregation, Aggregator, DateOperation
from forestadmin.datasource_toolkit.interfaces.query.projections import Projection
from sqlalchemy import DATE, cast
from sqlalchemy import DATE, Integer, cast
from sqlalchemy import column as SqlAlchemyColumn
from sqlalchemy import func, text
from sqlalchemy import extract, func, text
from sqlalchemy.engine import Dialect


@@ -82,12 +82,22 @@ def build_group(
class DateAggregation:
@staticmethod
def build_postgres(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchemyColumn:

return func.date_trunc(operation.value.lower(), column)

@staticmethod
def build_sqllite(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchemyColumn:
def build_sqlite(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchemyColumn:
if operation == DateOperation.WEEK:
return func.DATE(column, "weekday 1", "-7 days")
elif operation == DateOperation.QUARTER:
return func.date(
func.strftime("%Y", column)
+ "-"
+ func.printf("%02d", (func.floor((func.cast(func.strftime("%m", column), Integer) - 1) / 3) + 1) * 3)
+ "-01",
"+1 month",
"-1 day",
)
elif operation == DateOperation.YEAR:
format = "%Y-01-01"
elif operation == DateOperation.MONTH:
@@ -107,6 +117,15 @@ def build_mysql(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchem
format = "%Y-%m-01"
elif operation == DateOperation.WEEK:
return cast(func.date_sub(column, text(f"INTERVAL(WEEKDAY({column})) DAY")), DATE)
elif operation == DateOperation.QUARTER:
return func.last_day(
func.str_to_date(
func.concat(
func.year(column), "-", func.lpad(func.ceiling(extract("month", column) / 3) * 3, 2, "0"), "-01"
),
"%Y-%m-%d",
)
)
elif operation == DateOperation.DAY:
format = "%Y-%m-%d"
else:
@@ -121,6 +140,14 @@ def build_mssql(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchem
return func.datefromparts(func.extract("year", column), func.extract("month", column), "01")
elif operation == DateOperation.WEEK:
return cast(func.dateadd(text("day"), -func.extract("dw", column) + 2, column), DATE)
elif operation == DateOperation.QUARTER:
return func.eomonth(
func.datefromparts(
func.extract("YEAR", column),
func.datepart(text("QUARTER"), column) * text("3"),
text("1"),
)
)
elif operation == DateOperation.DAY:
return func.datefromparts(
func.extract("year", column),
@@ -131,7 +158,7 @@ def build_mssql(column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchem
@classmethod
def build(cls, dialect: Dialect, column: SqlAlchemyColumn, operation: DateOperation) -> SqlAlchemyColumn:
if dialect.name == "sqlite":
return cls.build_sqllite(column, operation)
return cls.build_sqlite(column, operation)
elif dialect.name in ["mysql", "mariadb"]:
return cls.build_mysql(column, operation)
elif dialect.name == "postgresql":