From b0387f053e386e28dd86710ad83278fd66c48f4e Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 08:33:24 +0100 Subject: [PATCH 01/55] compute cone during seq creation --- src/app/api/api_v1/endpoints/detections.py | 6 +- src/app/api/api_v1/endpoints/sequences.py | 76 +--------------------- src/app/models.py | 2 + src/app/schemas/sequences.py | 5 +- src/app/services/cones.py | 17 +++++ src/tests/conftest.py | 4 ++ src/tests/endpoints/test_sequences.py | 10 +-- 7 files changed, 35 insertions(+), 85 deletions(-) create mode 100644 src/app/services/cones.py diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index bd88daf9..f589221d 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -42,6 +42,7 @@ from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceUpdate from app.services.slack import slack_client +from app.services.cones import resolve_cone from app.services.storage import s3_service, upload_file from app.services.telegram import telegram_client from app.services.telemetry import telemetry_client @@ -118,12 +119,16 @@ async def create_detection( ) if len(dets_) >= settings.SEQUENCE_MIN_INTERVAL_DETS: + camera = cast(Camera, await cameras.get(det.camera_id, strict=True)) + cone_azimuth, cone_angle = resolve_cone(det.azimuth, dets_[0].bboxes, camera.angle_of_view) # Create new sequence sequence_ = await sequences.create( Sequence( camera_id=token_payload.sub, pose_id=pose_id, azimuth=det.azimuth, + cone_azimuth=cone_azimuth, + cone_angle=cone_angle, started_at=dets_[0].created_at, last_seen_at=det.created_at, ) @@ -152,7 +157,6 @@ async def create_detection( if org.slack_hook: bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(token_payload.organization_id)) url = bucket.get_public_url(det.bucket_key) - camera = cast(Camera, await cameras.get(det.camera_id, strict=True)) background_tasks.add_task( slack_client.notify, org.slack_hook, det.model_dump_json(), url, camera.name diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index c78c6170..13e708be 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -3,10 +3,8 @@ # This program is licensed under the Apache License 2.0. # See LICENSE or go to for full license details. 
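+# Cone azimuth/angle are now computed once at sequence creation (see
+# app.services.cones.resolve_cone) and persisted on the Sequence model, so
+# the per-request cone resolution helpers are dropped from this module.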
-from ast import literal_eval from datetime import date, datetime, timedelta -from operator import itemgetter -from typing import Dict, List, Tuple, Union, cast +from typing import List, Union, cast from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status from sqlmodel import func, select @@ -33,55 +31,6 @@ async def verify_org_rights( raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") -def _resolve_cone(azimuth: float, bboxes_str: str, aov: float) -> Tuple[float, float]: - bboxes = literal_eval(bboxes_str) - # Take the bbox with the highest confidence - xmin, _, xmax, _, _ = max(bboxes, key=itemgetter(2)) - return azimuth + aov * ((xmin + xmax) / 2 - 0.5), aov * (xmax - xmin) - - -async def resolve_detection_cones( - seq_ids: List[int], session: AsyncSession = Depends(get_session) -) -> Dict[int, Tuple[float, float]]: - if not seq_ids: - return {} - - # Define a Common Table Expression (CTE) using a window function - # Partition by sequence_id, order by id ascending, assign row number - row_number_cte = ( - select( # type: ignore[call-overload] - Detection.id.label("detection_id"), # type: ignore[attr-defined] - Detection.sequence_id, - Detection.azimuth, - Detection.bboxes, - Detection.camera_id, - func.row_number() - .over( - partition_by=Detection.sequence_id, - order_by=Detection.id.asc(), # type: ignore[attr-defined] - ) - .label("rn"), # Assign row number within each sequence_id group - ) - .where(Detection.sequence_id.in_(seq_ids)) # type: ignore[union-attr] - .cte("ranked_detections") # Create a Common Table Expression - ) - - # Main query: Select from the CTE, join with Camera, filter for row_number = 1 - query = ( - select(row_number_cte.c.sequence_id, row_number_cte.c.azimuth, row_number_cte.c.bboxes, Camera.angle_of_view) # type: ignore[attr-defined] - # Join the CTE results with the Camera table - .join(Camera, row_number_cte.c.camera_id == Camera.id) - # Filter the CTE results to get only the row with rn = 1 (minimum id) for each sequence - .where(row_number_cte.c.rn == 1) - ) - - det_infos = await session.exec(query) - results = det_infos.all() - - # For each sequence, resolve the azimuth + opening angle - return {seq_id: _resolve_cone(azimuth, bboxes_str, aov) for seq_id, azimuth, bboxes_str, aov in results} - - @router.get("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific sequence") async def get_sequence( sequence_id: int = Path(..., gt=0), @@ -142,11 +91,8 @@ async def fetch_latest_unlabeled_sequences( token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), ) -> List[SequenceWithCone]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-latest") - # Limit to cameras in the same organization - # Get camera IDs for org camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) - # Get sequences for those cameras fetched_sequences = ( await session.exec( select(Sequence) @@ -157,15 +103,7 @@ async def fetch_latest_unlabeled_sequences( .limit(15) ) ).all() - if len(fetched_sequences) == 0: - return [] - det_cones = await resolve_detection_cones([elt.__dict__["id"] for elt in fetched_sequences], session) - return [ - SequenceWithCone( - **elt.__dict__, cone_azimuth=det_cones[elt.__dict__["id"]][0], cone_angle=det_cones[elt.__dict__["id"]][1] - ) - for elt in fetched_sequences - ] + return [SequenceWithCone(**seq.model_dump()) for seq in 
fetched_sequences] @router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the sequences for a specific date") @@ -190,15 +128,7 @@ async def fetch_sequences_from_date( .offset(offset) ) ).all() - if len(fetched_sequences) == 0: - return [] - det_cones = await resolve_detection_cones([elt.__dict__["id"] for elt in fetched_sequences], session) - return [ - SequenceWithCone( - **elt.__dict__, cone_azimuth=det_cones[elt.__dict__["id"]][0], cone_angle=det_cones[elt.__dict__["id"]][1] - ) - for elt in fetched_sequences - ] + return [SequenceWithCone(**seq.model_dump()) for seq in fetched_sequences] @router.delete("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Delete a sequence") diff --git a/src/app/models.py b/src/app/models.py index 64be6851..c81a06a7 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ -86,6 +86,8 @@ class Sequence(SQLModel, table=True): pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True) azimuth: float = Field(..., ge=0, lt=360) is_wildfire: Union[AnnotationType, None] = None + cone_azimuth: Union[float, None] = Field(None, nullable=True) + cone_angle: Union[float, None] = Field(None, nullable=True) started_at: datetime = Field(..., nullable=False) last_seen_at: datetime = Field(..., nullable=False) diff --git a/src/app/schemas/sequences.py b/src/app/schemas/sequences.py index 382aedbc..881d16dd 100644 --- a/src/app/schemas/sequences.py +++ b/src/app/schemas/sequences.py @@ -9,7 +9,7 @@ from app.models import AnnotationType, Sequence -__all__ = ["SequenceUpdate", "SequenceWithCone"] +__all__ = ["SequenceLabel", "SequenceUpdate", "SequenceWithCone"] # Accesses @@ -22,5 +22,4 @@ class SequenceLabel(BaseModel): class SequenceWithCone(Sequence): - cone_azimuth: float - cone_angle: float + pass diff --git a/src/app/services/cones.py b/src/app/services/cones.py new file mode 100644 index 00000000..c4c5889a --- /dev/null +++ b/src/app/services/cones.py @@ -0,0 +1,17 @@ +# Copyright (C) 2025, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. 
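+
+# Geometry note: bbox coordinates are relative (0..1) across the image, so the
+# horizontal center of the most confident bbox maps linearly onto the camera's
+# angle of view. Illustrative example (hypothetical values): with azimuth=90.0,
+# aov=60.0 and best bbox (0.4, 0.2, 0.6, 0.4, 0.9), the center (0.4 + 0.6) / 2
+# = 0.5 gives cone_azimuth = 90.0 + 60.0 * (0.5 - 0.5) = 90.0 and
+# cone_angle = 60.0 * (0.6 - 0.4) = 12.0.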
+ +from ast import literal_eval +from operator import itemgetter +from typing import Tuple + + +def resolve_cone(azimuth: float, bboxes_str: str, aov: float) -> Tuple[float, float]: + """Compute the cone azimuth and opening angle using the most confident bbox.""" + bboxes = literal_eval(bboxes_str) + xmin, _, xmax, _, _ = max(bboxes, key=itemgetter(2)) + cone_azimuth = azimuth + aov * ((xmin + xmax) / 2 - 0.5) + cone_angle = aov * (xmax - xmin) + return cone_azimuth, cone_angle diff --git a/src/tests/conftest.py b/src/tests/conftest.py index 2f4d930e..0b6873b0 100644 --- a/src/tests/conftest.py +++ b/src/tests/conftest.py @@ -166,6 +166,8 @@ "pose_id": 1, "azimuth": 43.7, "is_wildfire": "wildfire_smoke", + "cone_azimuth": 34.57, + "cone_angle": 54.78, "started_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format), }, @@ -175,6 +177,8 @@ "pose_id": 3, "azimuth": 74.8, "is_wildfire": None, + "cone_azimuth": 65.67, + "cone_angle": 54.78, "started_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), }, diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py index bf9e8846..dd8aaf57 100644 --- a/src/tests/endpoints/test_sequences.py +++ b/src/tests/endpoints/test_sequences.py @@ -176,10 +176,7 @@ async def test_fetch_sequences_from_date( if isinstance(status_detail, str): assert response.json()["detail"] == status_detail if response.status_code // 100 == 2: - # Compare without cone_azimuth and cone_angle - assert [ - {k: v for k, v in item.items() if k not in {"cone_azimuth", "cone_angle"}} for item in response.json() - ] == expected_result + assert response.json() == expected_result assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json()) assert all(isinstance(elt["cone_angle"], float) for elt in response.json()) @@ -216,9 +213,6 @@ async def test_latest_sequences( if isinstance(status_detail, str): assert response.json()["detail"] == status_detail if response.status_code // 100 == 2: - # Compare without cone_azimuth and cone_angle - assert [ - {k: v for k, v in item.items() if k not in {"cone_azimuth", "cone_angle"}} for item in response.json() - ] == expected_result + assert response.json() == expected_result assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json()) assert all(isinstance(elt["cone_angle"], float) for elt in response.json()) From 05c2930f8ed1db02a8a395c11ca4e5485afc4608 Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 09:23:27 +0100 Subject: [PATCH 02/55] round values --- src/app/services/cones.py | 4 ++-- src/tests/conftest.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/app/services/cones.py b/src/app/services/cones.py index c4c5889a..46e4bd55 100644 --- a/src/app/services/cones.py +++ b/src/app/services/cones.py @@ -12,6 +12,6 @@ def resolve_cone(azimuth: float, bboxes_str: str, aov: float) -> Tuple[float, fl """Compute the cone azimuth and opening angle using the most confident bbox.""" bboxes = literal_eval(bboxes_str) xmin, _, xmax, _, _ = max(bboxes, key=itemgetter(2)) - cone_azimuth = azimuth + aov * ((xmin + xmax) / 2 - 0.5) - cone_angle = aov * (xmax - xmin) + cone_azimuth = round(azimuth + aov * ((xmin + xmax) / 2 - 0.5), 1) + cone_angle = round(aov * (xmax - xmin), 1) return cone_azimuth, cone_angle diff --git a/src/tests/conftest.py b/src/tests/conftest.py index 
0b6873b0..276360b0 100644 --- a/src/tests/conftest.py +++ b/src/tests/conftest.py @@ -166,8 +166,8 @@ "pose_id": 1, "azimuth": 43.7, "is_wildfire": "wildfire_smoke", - "cone_azimuth": 34.57, - "cone_angle": 54.78, + "cone_azimuth": 34.6, + "cone_angle": 54.8, "started_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format), }, @@ -177,8 +177,8 @@ "pose_id": 3, "azimuth": 74.8, "is_wildfire": None, - "cone_azimuth": 65.67, - "cone_angle": 54.78, + "cone_azimuth": 65.7, + "cone_angle": 54.8, "started_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), }, From 212bac9af17492a4348e55497d3fcf073ad3f9db Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 16:43:17 +0100 Subject: [PATCH 03/55] drop seq with cones --- src/app/api/api_v1/endpoints/sequences.py | 10 +++++----- src/app/schemas/sequences.py | 6 +----- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 13e708be..e080ac8f 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -16,7 +16,7 @@ from app.models import Camera, Detection, Sequence, UserRole from app.schemas.detections import DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload -from app.schemas.sequences import SequenceLabel, SequenceWithCone +from app.schemas.sequences import SequenceLabel from app.services.storage import s3_service from app.services.telemetry import telemetry_client @@ -89,7 +89,7 @@ async def fetch_sequence_detections( async def fetch_latest_unlabeled_sequences( session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[SequenceWithCone]: +) -> List[Sequence]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-latest") camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) @@ -103,7 +103,7 @@ async def fetch_latest_unlabeled_sequences( .limit(15) ) ).all() - return [SequenceWithCone(**seq.model_dump()) for seq in fetched_sequences] + return fetched_sequences @router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the sequences for a specific date") @@ -113,7 +113,7 @@ async def fetch_sequences_from_date( offset: Union[int, None] = Query(0, description="Number of sequences to skip before starting to fetch"), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[SequenceWithCone]: +) -> List[Sequence]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-from-date") # Limit to cameras in the same organization camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) @@ -128,7 +128,7 @@ async def fetch_sequences_from_date( .offset(offset) ) ).all() - return [SequenceWithCone(**seq.model_dump()) for seq in fetched_sequences] + return fetched_sequences @router.delete("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Delete a sequence") diff --git a/src/app/schemas/sequences.py b/src/app/schemas/sequences.py index 881d16dd..30a89692 100644 --- a/src/app/schemas/sequences.py +++ b/src/app/schemas/sequences.py @@ -9,7 +9,7 @@ from 
app.models import AnnotationType, Sequence -__all__ = ["SequenceLabel", "SequenceUpdate", "SequenceWithCone"] +__all__ = ["SequenceLabel", "SequenceUpdate"] # Accesses @@ -19,7 +19,3 @@ class SequenceUpdate(BaseModel): class SequenceLabel(BaseModel): is_wildfire: AnnotationType - - -class SequenceWithCone(Sequence): - pass From 2be61dda4f983a1ee1e87ecc35590d3be2163a74 Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 16:45:27 +0100 Subject: [PATCH 04/55] create alerts --- src/app/crud/crud_alert.py | 17 +++++++++++++++++ src/app/schemas/alerts.py | 30 ++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 src/app/crud/crud_alert.py create mode 100644 src/app/schemas/alerts.py diff --git a/src/app/crud/crud_alert.py b/src/app/crud/crud_alert.py new file mode 100644 index 00000000..0419be96 --- /dev/null +++ b/src/app/crud/crud_alert.py @@ -0,0 +1,17 @@ +# Copyright (C) 2025, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.crud.base import BaseCRUD +from app.models import Alert +from app.schemas.alerts import AlertCreate, AlertUpdate + +__all__ = ["AlertCRUD"] + + +class AlertCRUD(BaseCRUD[Alert, AlertCreate, AlertUpdate]): + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, Alert) diff --git a/src/app/schemas/alerts.py b/src/app/schemas/alerts.py new file mode 100644 index 00000000..bba0d196 --- /dev/null +++ b/src/app/schemas/alerts.py @@ -0,0 +1,30 @@ +# Copyright (C) 2025, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + +__all__ = ["AlertCreate", "AlertRead", "AlertUpdate"] + + +class AlertCreate(BaseModel): + organization_id: int = Field(..., gt=0) + lat: Optional[float] = None + lon: Optional[float] = None + start_at: Optional[datetime] = None + + +class AlertUpdate(BaseModel): + organization_id: Optional[int] = Field(None, gt=0) + lat: Optional[float] = None + lon: Optional[float] = None + start_at: Optional[datetime] = None + + +class AlertRead(AlertCreate): + id: int + created_at: datetime From d504b350deb3f480a5312c1764f43df095513861 Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 16:46:02 +0100 Subject: [PATCH 05/55] add overlap from triangulation pr --- src/app/services/overlap.py | 365 ++++++++++++++++++++++++++++++++++++ 1 file changed, 365 insertions(+) create mode 100644 src/app/services/overlap.py diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py new file mode 100644 index 00000000..54b30f94 --- /dev/null +++ b/src/app/services/overlap.py @@ -0,0 +1,365 @@ +# Copyright (C) 2020-2025, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. 
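+
+# Pipeline overview (as implemented below): detection cones are built on the
+# geodesic in EPSG:4326 and projected to EPSG:3857 for planar intersection
+# tests; sequences whose time windows and cones overlap form a graph, maximal
+# cliques of that graph are split into localized groups when their pairwise
+# intersection barycenters spread beyond max_dist_km, and each group's smoke
+# location is the median of those pair barycenters.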
+
+
+from __future__ import annotations
+
+import itertools
+from collections import defaultdict
+from math import atan2, cos, radians, sin, sqrt
+from typing import Dict, List, Optional, Tuple
+
+import networkx as nx  # type: ignore
+import numpy as np
+import pandas as pd
+import pyproj
+from geopy.distance import geodesic
+from pyproj import Transformer
+from shapely.geometry import Polygon  # type: ignore
+from shapely.geometry.base import BaseGeometry
+from shapely.ops import transform as shapely_transform  # type: ignore
+
+
+def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
+    """
+    Compute the great circle distance between two points on the Earth surface using the Haversine formula.
+
+    Parameters
+    ----------
+    lat1 : float
+        Latitude of point 1 in decimal degrees.
+    lon1 : float
+        Longitude of point 1 in decimal degrees.
+    lat2 : float
+        Latitude of point 2 in decimal degrees.
+    lon2 : float
+        Longitude of point 2 in decimal degrees.
+
+    Returns
+    -------
+    float
+        Distance between the two points in kilometers.
+    """
+    r_earth = 6371.0
+    dlat = radians(lat2 - lat1)
+    dlon = radians(lon2 - lon1)
+    a = sin(dlat / 2) ** 2 + cos(radians(lat1)) * cos(radians(lat2)) * sin(dlon / 2) ** 2
+    c = 2 * atan2(sqrt(a), sqrt(1 - a))
+    return r_earth * c
+
+
+def get_centroid_latlon(geom: BaseGeometry) -> Tuple[float, float]:
+    """
+    Compute the geographic coordinates of the centroid of a given geometry.
+
+    Parameters
+    ----------
+    geom : BaseGeometry
+        Geometry in EPSG:3857 (Web Mercator projection).
+
+    Returns
+    -------
+    tuple[float, float]
+        Latitude and longitude of the centroid in EPSG:4326.
+    """
+    centroid = geom.centroid
+    transformer = pyproj.Transformer.from_crs("EPSG:3857", "EPSG:4326", always_xy=True)
+    lon, lat = transformer.transform(centroid.x, centroid.y)
+    return float(lat), float(lon)
+
+
+def _build_cone_polygon(
+    lat: float,
+    lon: float,
+    azimuth: float,
+    opening_angle: float,
+    dist_km: float,
+    r_min_km: float,
+    resolution: int = 36,
+) -> Polygon:
+    """
+    Build a cone sector polygon on the sphere, then return it in geographic coordinates.
+
+    Parameters
+    ----------
+    lat : float
+        Camera latitude.
+    lon : float
+        Camera longitude.
+    azimuth : float
+        Cone central azimuth in degrees.
+    opening_angle : float
+        Full opening angle in degrees.
+    dist_km : float
+        Outer radius in kilometers.
+    r_min_km : float
+        Inner radius in kilometers.
+    resolution : int
+        Number of points to sample the arc.
+
+    Returns
+    -------
+    shapely.geometry.Polygon
+        Cone polygon in EPSG:4326 coordinates.
+    """
+    half_angle = opening_angle / 2.0
+    angles = np.linspace(azimuth - half_angle, azimuth + half_angle, resolution)
+
+    # Outer arc points
+    outer_arc = [geodesic(kilometers=dist_km).destination((lat, lon), float(az % 360)) for az in angles]
+    outer_points = [(p.longitude, p.latitude) for p in outer_arc]
+
+    if r_min_km > 0:
+        # Inner arc points, walk reversed so ring orientation stays valid
+        inner_arc = [geodesic(kilometers=r_min_km).destination((lat, lon), float(az % 360)) for az in reversed(angles)]
+        inner_points = [(p.longitude, p.latitude) for p in inner_arc]
+        # Annular sector: the shell already walks the outer arc then the reversed inner arc
+        return Polygon(outer_points + inner_points).buffer(0)
+    # Triangle like sector with apex at camera position
+    return Polygon([(lon, lat), *outer_points]).buffer(0)
+
+
+def _project_polygon_to_3857(polygon: Polygon) -> Polygon:
+    """
+    Project a polygon from EPSG:4326 to EPSG:3857.
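+
+    EPSG:3857 (Web Mercator) uses planar coordinates in meters, which lets the
+    rest of this module apply Shapely's planar intersection and area tests.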
+ + Parameters + ---------- + polygon : Polygon + Geometry in EPSG:4326. + + Returns + ------- + Polygon + Geometry in EPSG:3857. + """ + transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True) + return shapely_transform(transformer.transform, polygon) + + +def get_projected_cone(row: pd.Series, r_km: float, r_min_km: float) -> Polygon: + """ + Build and project a detection cone to Web Mercator. + + Parameters + ---------- + row : pd.Series + Row with fields: lat, lon, cone_azimuth, cone_angle. + r_km : float + Outer radius of the camera detection cone in kilometers. + r_min_km : float + Inner radius of the camera detection cone in kilometers. + + Returns + ------- + Polygon + Cone geometry in EPSG:3857. + """ + poly = _build_cone_polygon( + float(row["lat"]), + float(row["lon"]), + float(row["cone_azimuth"]), + float(row["cone_angle"]), + float(r_km), + float(r_min_km), + ) + return _project_polygon_to_3857(poly) + + +def _compute_localized_groups_from_cliques( + df: pd.DataFrame, + cliques: List[Tuple[int, ...]], + projected_cones: Dict[int, Polygon], + max_dist_km: float, +) -> List[Tuple[int, ...]]: + """ + From maximal cliques, split each clique into localized groups. + + Rules + ----- + For groups with size at least three, keep the whole group if the maximum distance + among all pair intersection barycenters is within max_dist_km. Otherwise split the + clique into all two by two pairs. + + Parameters + ---------- + df : pd.DataFrame + Source sequences, must contain column id. + cliques : list[tuple[int, ...]] + Maximal cliques computed from the overlap graph. + projected_cones : dict[int, Polygon] + Mapping from sequence id to its cone geometry in EPSG:3857. + max_dist_km : float + Maximum allowed distance between pair barycenters to keep a group. + + Returns + ------- + list[tuple[int, ...]] + Unique localized groups as sorted tuples, with strict subsets removed. 
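+
+    Example
+    -------
+    With hypothetical ids: a clique (1, 2, 3) whose pair intersection
+    barycenters spread over more than max_dist_km is split into the pairs
+    (1, 2), (1, 3) and (2, 3); if the spread stays within max_dist_km,
+    (1, 2, 3) is kept as a single localized group.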
+ """ + base = [tuple(sorted(g)) for g in cliques] + ids_in_cliques = {x for g in base for x in g} + all_ids = set(df["id"].astype(int).tolist()) + work = base + [(sid,) for sid in sorted(all_ids - ids_in_cliques)] + + def split_one_group(group: Tuple[int, ...]) -> List[Tuple[int, ...]]: + group = tuple(sorted(group)) + if len(group) <= 1: + return [group] + + # Collect pairwise intersection barycenters + pair_barys: List[Tuple[float, float]] = [] + for i, j in itertools.combinations(group, 2): + gi = projected_cones.get(i) + gj = projected_cones.get(j) + if gi is None or gj is None: + continue + inter = gi.intersection(gj) + if inter.is_empty or inter.area <= 0: + continue + pair_barys.append(get_centroid_latlon(inter)) + + if len(group) == 2: + return [group] + + if len(pair_barys) < 2: + # Not enough info to validate locality, fall back to all pairs + return [tuple(sorted(p)) for p in itertools.combinations(group, 2)] + + # Diameter of barycenters + max_d = 0.0 + for (lat1, lon1), (lat2, lon2) in itertools.combinations(pair_barys, 2): + d = haversine_km(lat1, lon1, lat2, lon2) + if d > max_d: + max_d = d + + if max_d <= max_dist_km: + return [group] + return [tuple(sorted(p)) for p in itertools.combinations(group, 2)] + + # Build candidate groups from all cliques + candidates: List[Tuple[int, ...]] = [] + for clique in sorted(set(work)): + candidates.extend(split_one_group(clique)) + + # Remove exact duplicates + candidates = sorted({tuple(sorted(g)) for g in candidates}) + + # Drop strict subsets of any other group + keep: List[Tuple[int, ...]] = [] + as_sets = [set(g) for g in candidates] + for i, gi in enumerate(as_sets): + if any(i != j and gi.issubset(as_sets[j]) for j in range(len(as_sets))): + continue + keep.append(candidates[i]) + + return keep + + +def compute_overlap( + api_sequences: pd.DataFrame, + r_km: float = 35.0, + r_min_km: float = 0.5, + max_dist_km: float = 2.0, +) -> pd.DataFrame: + """ + Build localized event groups and attach them to the input DataFrame. + + This function sets two columns on the returned DataFrame: + event_groups: list of tuples of sequence ids + event_smoke_locations: list of (lat, lon), same order as event_groups + + Parameters + ---------- + api_sequences : pd.DataFrame + Input with fields: id, lat, lon, cone_azimuth, cone_angle, is_wildfire, + started_at, last_seen_at. + r_km : float + Outer radius of the camera detection cone in kilometers. + r_min_km : float + Inner radius of the camera detection cone in kilometers. + max_dist_km : float + Maximum allowed distance between pair intersection barycenters to keep a group. + + Returns + ------- + pd.DataFrame + DataFrame copy including event_groups and event_smoke_locations columns. 
+ """ + df = api_sequences.copy() + df["id"] = df["id"].astype(int) + df["started_at"] = pd.to_datetime(df["started_at"]) + df["last_seen_at"] = pd.to_datetime(df["last_seen_at"]) + + # keep positives and unknowns + df_valid = df[df["is_wildfire"].isin([None, "wildfire_smoke"])] + + if df_valid.empty: + df["event_groups"] = df["id"].astype(int).map(lambda sid: [(sid,)]) + df["event_smoke_locations"] = [[] for _ in range(len(df))] + return df + + # Precompute cones in Web Mercator + projected_cones: Dict[int, Polygon] = { + int(row["id"]): get_projected_cone(row, r_km, r_min_km) for _, row in df_valid.iterrows() + } + + # Phase 1, build overlap graph gated by time overlap + ids = df_valid["id"].astype(int).tolist() + rows_by_id: Dict[int, Dict[str, pd.Timestamp]] = df_valid.set_index("id")[["started_at", "last_seen_at"]].to_dict( + "index" + ) + + overlapping_pairs: List[Tuple[int, int]] = [] + for i, id1 in enumerate(ids): + row1 = rows_by_id[id1] + for id2 in ids[i + 1 :]: + row2 = rows_by_id[id2] + # Require overlapping time windows + if row1["started_at"] > row2["last_seen_at"] or row2["started_at"] > row1["last_seen_at"]: + continue + # Spatial overlap test + if projected_cones[id1].intersects(projected_cones[id2]): + overlapping_pairs.append((id1, id2)) + + graph = nx.Graph() + graph.add_edges_from(overlapping_pairs) + cliques = [tuple(sorted(c)) for c in nx.find_cliques(graph) if len(c) >= 2] + + # Phase 2, localized groups from cliques + localized_groups = _compute_localized_groups_from_cliques(df, cliques, projected_cones, max_dist_km) + + # Per group localization, median of pair barycenters for robustness + def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, float]]: + if len(seq_tuple) < 2: + return None + pts: List[Tuple[float, float]] = [] + for i, j in itertools.combinations(seq_tuple, 2): + inter = projected_cones[i].intersection(projected_cones[j]) + if inter.is_empty or inter.area <= 0: + continue + pts.append(get_centroid_latlon(inter)) + if not pts: + return None + lats, lons = zip(*pts) + return float(np.median(lats)), float(np.median(lons)) + + group_to_smoke: Dict[Tuple[int, ...], Optional[Tuple[float, float]]] = { + g: group_smoke_location(g) for g in localized_groups + } + + # Attach back to df + seq_to_groups: Dict[int, List[Tuple[int, ...]]] = defaultdict(list) + seq_to_smokes: Dict[int, List[Optional[Tuple[float, float]]]] = defaultdict(list) + for g in localized_groups: + smo = group_to_smoke[g] + for sid in g: + seq_to_groups[sid].append(g) + seq_to_smokes[sid].append(smo) + + df["event_groups"] = df["id"].astype(int).map(lambda sid: seq_to_groups.get(sid, [(sid,)])) + df["event_smoke_locations"] = df["id"].astype(int).map(lambda sid: seq_to_smokes.get(sid, [])) + + return df \ No newline at end of file From c508900257656c57d4150c5bfd381c2cf6b81756 Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 16:47:18 +0100 Subject: [PATCH 06/55] new alerts strat --- src/app/api/api_v1/endpoints/detections.py | 115 ++++++++++++++++++++- src/app/api/dependencies.py | 6 +- 2 files changed, 118 insertions(+), 3 deletions(-) diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index f589221d..8570ea13 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -4,6 +4,7 @@ # See LICENSE or go to for full license details. 
+import itertools from datetime import datetime, timedelta from typing import List, Optional, cast @@ -19,6 +20,8 @@ UploadFile, status, ) +import pandas as pd +from sqlmodel import select from app.api.dependencies import ( dispatch_webhook, @@ -26,12 +29,13 @@ get_detection_crud, get_jwt, get_organization_crud, + get_alert_crud, get_sequence_crud, get_webhook_crud, ) from app.core.config import settings -from app.crud import CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD -from app.models import Camera, Detection, Organization, Role, Sequence, UserRole +from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD +from app.models import AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole from app.schemas.detections import ( BOXES_PATTERN, COMPILED_BOXES_PATTERN, @@ -39,8 +43,10 @@ DetectionSequence, DetectionUrl, ) +from app.schemas.alerts import AlertCreate from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceUpdate +from app.services.overlap import compute_overlap from app.services.slack import slack_client from app.services.cones import resolve_cone from app.services.storage import s3_service, upload_file @@ -49,6 +55,108 @@ router = APIRouter() +ALERT_LOOKBACK_HOURS = 24 + + +async def _attach_sequence_to_alert( + sequence_: Sequence, + camera: Camera, + cameras: CameraCRUD, + sequences: SequenceCRUD, + alerts: AlertCRUD, +) -> None: + """Assign the given sequence to an alert based on cone/time overlap.""" + org_cameras = await cameras.fetch_all(filters=("organization_id", camera.organization_id)) + camera_by_id = {cam.id: cam for cam in org_cameras} + + if sequence_.camera_id not in camera_by_id: + camera_by_id[sequence_.camera_id] = camera + + # Fetch recent sequences for the organization based on recency of last_seen_at + recent_sequences = await sequences.fetch_all( + in_pair=("camera_id", list(camera_by_id.keys())), + inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(minutes=30)), + ) + + # Ensure the newly created sequence is present + if all(seq.id != sequence_.id for seq in recent_sequences): + recent_sequences.append(sequence_) + + # Build DataFrame for overlap computation + records = [] + for seq in recent_sequences: + cam = camera_by_id.get(seq.camera_id) + if cam is None or seq.cone_azimuth is None or seq.cone_angle is None: + continue + records.append( + { + "id": int(seq.id), + "lat": float(cam.lat), + "lon": float(cam.lon), + "cone_azimuth": float(seq.cone_azimuth), + "cone_angle": float(seq.cone_angle), + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + } + ) + + if not records: + return + + df = compute_overlap(pd.DataFrame.from_records(records)) + row = df[df["id"] == int(sequence_.id)] + if row.empty: + return + groups = row.iloc[0]["event_groups"] + locations = row.iloc[0].get("event_smoke_locations", []) + group_locations = {tuple(g): locations[idx] if idx < len(locations) else None for idx, g in enumerate(groups)} + + seq_by_id = {seq.id: seq for seq in recent_sequences} + seq_ids = list(seq_by_id.keys()) + + # Existing alert links + session = sequences.session + mapping: dict[int, set[int]] = {} + if seq_ids: + stmt = ( + select(AlertSequence.alert_id, AlertSequence.sequence_id) + .where(AlertSequence.sequence_id.in_(seq_ids)) + ) + res = await session.exec(stmt) # type: ignore[arg-type] + for aid, sid in res: + mapping.setdefault(int(sid), set()).add(int(aid)) + + to_link: 
List[AlertSequence] = [] + + for g in groups: + g_tuple = tuple(g) + existing_alert_ids = {aid for sid in g_tuple for aid in mapping.get(int(sid), set())} + if existing_alert_ids: + target_alert_id = min(existing_alert_ids) + else: + location = group_locations.get(g_tuple) + start_at = min(seq_by_id[int(sid)].started_at for sid in g_tuple if int(sid) in seq_by_id) + alert = await alerts.create( + AlertCreate( + organization_id=camera.organization_id, + lat=location[0] if isinstance(location, tuple) else None, + lon=location[1] if isinstance(location, tuple) else None, + start_at=start_at, + ) + ) + target_alert_id = alert.id + for sid in g_tuple: + sid_int = int(sid) + if target_alert_id in mapping.get(sid_int, set()): + continue + mapping.setdefault(sid_int, set()).add(target_alert_id) + to_link.append(AlertSequence(alert_id=target_alert_id, sequence_id=sid_int)) + + if to_link: + session.add_all(to_link) + await session.commit() + @router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new wildfire detection") async def create_detection( @@ -67,6 +175,7 @@ async def create_detection( webhooks: WebhookCRUD = Depends(get_webhook_crud), organizations: OrganizationCRUD = Depends(get_organization_crud), sequences: SequenceCRUD = Depends(get_sequence_crud), + alerts: AlertCRUD = Depends(get_alert_crud), cameras: CameraCRUD = Depends(get_camera_crud), token_payload: TokenPayload = Security(get_jwt, scopes=[Role.CAMERA]), ) -> Detection: @@ -138,6 +247,8 @@ async def create_detection( for det_ in dets_: await detections.update(det_.id, DetectionSequence(sequence_id=sequence_.id)) + await _attach_sequence_to_alert(sequence_, camera, cameras, sequences, alerts) + # Webhooks whs = await webhooks.fetch_all() if any(whs): diff --git a/src/app/api/dependencies.py b/src/app/api/dependencies.py index ddb47bfe..f6173b87 100644 --- a/src/app/api/dependencies.py +++ b/src/app/api/dependencies.py @@ -15,7 +15,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession from app.core.config import settings -from app.crud import CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, UserCRUD, WebhookCRUD +from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, UserCRUD, WebhookCRUD from app.crud.crud_pose import PoseCRUD from app.db import get_session from app.models import User, UserRole @@ -65,6 +65,10 @@ def get_sequence_crud(session: AsyncSession = Depends(get_session)) -> SequenceC return SequenceCRUD(session=session) +def get_alert_crud(session: AsyncSession = Depends(get_session)) -> AlertCRUD: + return AlertCRUD(session=session) + + def decode_token(token: str, authenticate_value: Union[str, None] = None) -> Dict[str, str]: try: payload = jwt_decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM]) From 756038ba262aa0b093dcd9de512f91e72fd416ec Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 16:47:40 +0100 Subject: [PATCH 07/55] missing init --- src/app/crud/__init__.py | 1 + src/app/models.py | 18 +++++++++++++++++- src/app/schemas/__init__.py | 1 + 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/app/crud/__init__.py b/src/app/crud/__init__.py index f343a7a8..72b96e39 100644 --- a/src/app/crud/__init__.py +++ b/src/app/crud/__init__.py @@ -5,3 +5,4 @@ from .crud_organization import * from .crud_sequence import * from .crud_webhook import * +from .crud_alert import * diff --git a/src/app/models.py b/src/app/models.py index c81a06a7..5be83cd5 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ 
-11,7 +11,7 @@
 
 from app.core.config import settings
 
-__all__ = ["Camera", "Detection", "Organization", "Pose", "Sequence", "User"]
+__all__ = ["Alert", "AlertSequence", "Camera", "Detection", "Organization", "Pose", "Sequence", "User"]
 
 
 class UserRole(str, Enum):
@@ -92,6 +92,22 @@ class Sequence(SQLModel, table=True):
     last_seen_at: datetime = Field(..., nullable=False)
 
 
+class Alert(SQLModel, table=True):
+    __tablename__ = "alerts"
+    id: int = Field(None, primary_key=True)
+    organization_id: int = Field(..., foreign_key="organizations.id", nullable=False)
+    lat: Union[float, None] = Field(default=None)
+    lon: Union[float, None] = Field(default=None)
+    start_at: Union[datetime, None] = Field(default=None, nullable=True)
+    created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
+
+
+class AlertSequence(SQLModel, table=True):
+    __tablename__ = "alerts_sequences"
+    alert_id: int = Field(primary_key=True, foreign_key="alerts.id")
+    sequence_id: int = Field(primary_key=True, foreign_key="sequences.id")
+
+
 class Organization(SQLModel, table=True):
     __tablename__ = "organizations"
     id: int = Field(None, primary_key=True)
diff --git a/src/app/schemas/__init__.py b/src/app/schemas/__init__.py
index 93d4e0ea..46c37536 100644
--- a/src/app/schemas/__init__.py
+++ b/src/app/schemas/__init__.py
@@ -7,3 +7,4 @@
 from .organizations import *
 from .sequences import *
 from .webhooks import *
+from .alerts import *

From f6cf578b975ed09aaee1dd1d76449877ebf7e3fa Mon Sep 17 00:00:00 2001
From: Mateo
Date: Mon, 22 Dec 2025 16:47:50 +0100
Subject: [PATCH 08/55] missing deps

---
 pyproject.toml | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 9d799bca..1fef5be0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,6 +29,12 @@ python-multipart = "==0.0.7"
 python-magic = "^0.4.17"
 boto3 = "^1.26.0"
 httpx = "^0.24.0"
+geopy = "^2.4.0"
+networkx = "^3.2.0"
+numpy = "^1.26.0"
+pandas = "^2.2.0"
+pyproj = "^3.6.0"
+shapely = "^2.0.0"
 
 [tool.poetry.group.quality]
 optional = true

From 086cd7debc3b2452d0b037e38928c1f8087ef451 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Mon, 22 Dec 2025 16:50:25 +0100
Subject: [PATCH 09/55] use preset variable

---
 src/app/api/api_v1/endpoints/detections.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 8570ea13..0c46b05c 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -55,8 +55,6 @@
 
 router = APIRouter()
 
-ALERT_LOOKBACK_HOURS = 24
-
 
 async def _attach_sequence_to_alert(
     sequence_: Sequence,
@@ -75,7 +73,7 @@ async def _attach_sequence_to_alert(
     # Fetch recent sequences for the organization based on recency of last_seen_at
     recent_sequences = await sequences.fetch_all(
         in_pair=("camera_id", list(camera_by_id.keys())),
-        inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(minutes=30)),
+        inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS)),
     )
 
     # Ensure the newly created sequence is present

From b9e3faf8f76e5ee7ad8feb9a052db8edb8c5ba36 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Mon, 22 Dec 2025 17:15:44 +0100
Subject: [PATCH 10/55] update routes

---
 src/app/api/api_v1/endpoints/alerts.py | 135 +++++++++++++++++++++++++
 src/app/api/api_v1/router.py           |   3 +-
 2 files changed, 137 insertions(+), 1 deletion(-)
 create mode 100644 src/app/api/api_v1/endpoints/alerts.py

diff --git 
a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
new file mode 100644
index 00000000..c9d23fa6
--- /dev/null
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -0,0 +1,135 @@
+# Copyright (C) 2025, Pyronear.
+
+# This program is licensed under the Apache License 2.0.
+# See LICENSE or go to for full license details.
+
+from datetime import date, datetime, timedelta
+from typing import List, Union, cast
+
+from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status
+from sqlmodel import delete, func, select
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+from app.api.dependencies import get_alert_crud, get_jwt, get_sequence_crud
+from app.crud import AlertCRUD, SequenceCRUD
+from app.db import get_session
+from app.models import Alert, AlertSequence, Sequence, UserRole
+from app.schemas.login import TokenPayload
+from app.services.telemetry import telemetry_client
+
+router = APIRouter()
+
+
+async def verify_org_rights(organization_id: int, alert: Alert) -> None:
+    if organization_id != alert.organization_id:
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.")
+
+
+@router.get("/{alert_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific alert")
+async def get_alert(
+    alert_id: int = Path(..., gt=0),
+    alerts: AlertCRUD = Depends(get_alert_crud),
+    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]),
+) -> Alert:
+    telemetry_client.capture(token_payload.sub, event="alerts-get", properties={"alert_id": alert_id})
+    alert = cast(Alert, await alerts.get(alert_id, strict=True))
+
+    if UserRole.ADMIN not in token_payload.scopes:
+        await verify_org_rights(token_payload.organization_id, alert)
+
+    return alert
+
+
+@router.get(
+    "/{alert_id}/sequences", status_code=status.HTTP_200_OK, summary="Fetch the sequences associated with an alert"
+)
+async def fetch_alert_sequences(
+    alert_id: int = Path(..., gt=0),
+    limit: int = Query(10, description="Maximum number of sequences to fetch", ge=1, le=100),
+    desc: bool = Query(True, description="Whether to order the sequences by last_seen_at in descending order"),
+    alerts: AlertCRUD = Depends(get_alert_crud),
+    sequences: SequenceCRUD = Depends(get_sequence_crud),
+    session: AsyncSession = Depends(get_session),
+    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]),
+) -> List[Sequence]:
+    telemetry_client.capture(token_payload.sub, event="alerts-sequences-get", properties={"alert_id": alert_id})
+    alert = cast(Alert, await alerts.get(alert_id, strict=True))
+    if UserRole.ADMIN not in token_payload.scopes:
+        await verify_org_rights(token_payload.organization_id, alert)
+
+    stmt = (
+        select(Sequence)
+        .join(AlertSequence, AlertSequence.sequence_id == Sequence.id)
+        .where(AlertSequence.alert_id == alert_id)
+        .order_by(Sequence.last_seen_at.desc() if desc else Sequence.last_seen_at.asc())  # type: ignore[arg-type]
+        .limit(limit)
+    )
+    res = await session.exec(stmt)
+    return res.all()
+
+
+@router.get(
+    "/unlabeled/latest",
+    status_code=status.HTTP_200_OK,
+    summary="Fetch all the alerts with unlabeled sequences from the last 24 hours",
+)
+async def fetch_latest_unlabeled_alerts(
+    session: AsyncSession = Depends(get_session),
+    token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]),
+) -> List[Alert]:
+    telemetry_client.capture(token_payload.sub, 
event="alerts-fetch-latest") + + alerts_stmt = ( + select(Alert) + .join(AlertSequence, AlertSequence.alert_id == Alert.id) + .join(Sequence, Sequence.id == AlertSequence.sequence_id) + .where(Alert.organization_id == token_payload.organization_id) + .where(Sequence.last_seen_at > datetime.utcnow() - timedelta(hours=24)) + .where(Sequence.is_wildfire.is_(None)) # type: ignore[union-attr] + .order_by(Alert.start_at.desc().nullslast()) # type: ignore[attr-defined] + .limit(15) + ) + alerts_res = await session.exec(alerts_stmt) + return alerts_res.unique().all() # unique to deduplicate joins + + +@router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the alerts for a specific date") +async def fetch_alerts_from_date( + from_date: date = Query(), + limit: Union[int, None] = Query(15, description="Maximum number of alerts to fetch"), + offset: Union[int, None] = Query(0, description="Number of alerts to skip before starting to fetch"), + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), +) -> List[Alert]: + telemetry_client.capture(token_payload.sub, event="alerts-fetch-from-date") + + alerts_stmt = ( + select(Alert) + .where(Alert.organization_id == token_payload.organization_id) + .where(func.date(Alert.start_at) == from_date) + .order_by(Alert.start_at.desc().nullslast()) # type: ignore[attr-defined] + .limit(limit) + .offset(offset) + ) + alerts_res = await session.exec(alerts_stmt) + return alerts_res.all() + + +@router.delete("/{alert_id}", status_code=status.HTTP_200_OK, summary="Delete an alert") +async def delete_alert( + alert_id: int = Path(..., gt=0), + alerts: AlertCRUD = Depends(get_alert_crud), + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]), +) -> None: + telemetry_client.capture(token_payload.sub, event="alert-deletion", properties={"alert_id": alert_id}) + + # Ensure alert exists and org is valid + alert = cast(Alert, await alerts.get(alert_id, strict=True)) + await verify_org_rights(token_payload.organization_id, alert) + + # Delete associations + await session.exec(delete(AlertSequence).where(AlertSequence.alert_id == alert_id)) + await session.commit() + # Delete alert + await alerts.delete(alert_id) diff --git a/src/app/api/api_v1/router.py b/src/app/api/api_v1/router.py index e4efbf68..979b009b 100644 --- a/src/app/api/api_v1/router.py +++ b/src/app/api/api_v1/router.py @@ -5,7 +5,7 @@ from fastapi import APIRouter -from app.api.api_v1.endpoints import cameras, detections, login, organizations, poses, sequences, users, webhooks +from app.api.api_v1.endpoints import alerts, cameras, detections, login, organizations, poses, sequences, users, webhooks api_router = APIRouter(redirect_slashes=True) api_router.include_router(login.router, prefix="/login", tags=["login"]) @@ -13,6 +13,7 @@ api_router.include_router(cameras.router, prefix="/cameras", tags=["cameras"]) api_router.include_router(poses.router, prefix="/poses", tags=["poses"]) api_router.include_router(detections.router, prefix="/detections", tags=["detections"]) +api_router.include_router(alerts.router, prefix="/alerts", tags=["alerts"]) api_router.include_router(sequences.router, prefix="/sequences", tags=["sequences"]) api_router.include_router(organizations.router, prefix="/organizations", tags=["organizations"]) api_router.include_router(webhooks.router, prefix="/webhooks", tags=["webhooks"]) From 
2d8d6d0b6aee21b49ca8f5cd99d10c77a0f7d05c Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 22 Dec 2025 17:16:30 +0100 Subject: [PATCH 11/55] update poetry --- poetry.lock | 437 +++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 416 insertions(+), 21 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3ab1cc95..d052e730 100644 --- a/poetry.lock +++ b/poetry.lock @@ -199,18 +199,18 @@ tests = ["pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "boto3" -version = "1.42.13" +version = "1.42.14" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.42.13-py3-none-any.whl", hash = "sha256:9d6aad3fa8b90567006bf7b32efa26489fc306fbe63946eaf57b72356a45761d"}, - {file = "boto3-1.42.13.tar.gz", hash = "sha256:4c9a62dcb5c3f905630fe99fb4b81131da84c5c92eedcc81a89cbd924c1c524f"}, + {file = "boto3-1.42.14-py3-none-any.whl", hash = "sha256:bfcc665227bb4432a235cb4adb47719438d6472e5ccbf7f09512046c3f749670"}, + {file = "boto3-1.42.14.tar.gz", hash = "sha256:a5d005667b480c844ed3f814a59f199ce249d0f5669532a17d06200c0a93119c"}, ] [package.dependencies] -botocore = ">=1.42.13,<1.43.0" +botocore = ">=1.42.14,<1.43.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.16.0,<0.17.0" @@ -219,14 +219,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.42.13" +version = "1.42.14" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.42.13-py3-none-any.whl", hash = "sha256:b750b2de4a2478db9718a02395cb9da8698901ba02378d60037d6369ecb6bb88"}, - {file = "botocore-1.42.13.tar.gz", hash = "sha256:7e4cf14bd5719b60600fb45d2bb3ae140feb3c182a863b93093aafce7f93cfee"}, + {file = "botocore-1.42.14-py3-none-any.whl", hash = "sha256:efe89adfafa00101390ec2c371d453b3359d5f9690261bc3bd70131e0d453e8e"}, + {file = "botocore-1.42.14.tar.gz", hash = "sha256:cf5bebb580803c6cfd9886902ca24834b42ecaa808da14fb8cd35ad523c9f621"}, ] [package.dependencies] @@ -640,26 +640,26 @@ files = [ [[package]] name = "fastapi" -version = "0.125.0" +version = "0.127.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fastapi-0.125.0-py3-none-any.whl", hash = "sha256:2570ec4f3aecf5cca8f0428aed2398b774fcdfee6c2116f86e80513f2f86a7a1"}, - {file = "fastapi-0.125.0.tar.gz", hash = "sha256:16b532691a33e2c5dee1dac32feb31dc6eb41a3dd4ff29a95f9487cb21c054c0"}, + {file = "fastapi-0.127.0-py3-none-any.whl", hash = "sha256:725aa2bb904e2eff8031557cf4b9b77459bfedd63cae8427634744fd199f6a49"}, + {file = "fastapi-0.127.0.tar.gz", hash = "sha256:5a9246e03dcd1fdb19f1396db30894867c1d630f5107dc167dcbc5ed1ea7d259"}, ] [package.dependencies] annotated-doc = ">=0.0.2" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +pydantic = ">=2.7.0" starlette = ">=0.40.0,<0.51.0" typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx 
(>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] -standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" @@ -673,6 +673,42 @@ files = [ {file = "filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c"}, ] +[[package]] +name = "geographiclib" +version = "2.1" +description = "The geodesic routines from GeographicLib" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "geographiclib-2.1-py3-none-any.whl", hash = "sha256:e2a873b9b9e7fc38721ad73d5f4e6c9ed140d428a339970f505c07056997d40b"}, + {file = "geographiclib-2.1.tar.gz", hash = "sha256:6a6545e6262d0ed3522e13c515713718797e37ed8c672c31ad7b249f372ef108"}, +] + +[[package]] +name = "geopy" +version = "2.4.1" +description = "Python Geocoding Toolbox" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "geopy-2.4.1-py3-none-any.whl", hash = "sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7"}, + {file = "geopy-2.4.1.tar.gz", hash = "sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1"}, +] + +[package.dependencies] +geographiclib = ">=1.52,<3" + +[package.extras] +aiohttp = ["aiohttp"] +dev = ["coverage", "flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-docs = ["readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-lint = ["flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)"] +dev-test = ["coverage", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "sphinx (<=4.3.2)"] +requests = ["requests (>=2.16.2)", "urllib3 (>=1.24.2)"] +timezone = ["pytz"] + [[package]] name = "greenlet" version = "3.3.0" @@ -1074,16 +1110,110 @@ files = [ {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] +[[package]] +name = "networkx" +version = "3.6" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f"}, + {file = "networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad"}, +] + +[package.extras] +benchmarking = ["asv", "virtualenv"] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", 
"pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "iplotx (>=0.9.0)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +release = ["build (>=0.10)", "changelist (==0.5)", "twine (>=4.0)", "wheel (>=0.40)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + +[[package]] +name = "networkx" +version = "3.6.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = "!=3.14.1,>=3.11" +groups = ["main"] +markers = "python_version == \"3.11\"" +files = [ + {file = "networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762"}, + {file = "networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509"}, +] + +[package.extras] +benchmarking = ["asv", "virtualenv"] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "iplotx (>=0.9.0)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +release = ["build (>=0.10)", "changelist (==0.5)", "twine (>=4.0)", "wheel (>=0.40)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + [[package]] name = "nodeenv" -version = "1.9.1" +version = "1.10.0" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["quality"] files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, + {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, + {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = 
"numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = 
"numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -1098,6 +1228,105 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pandas" +version = "2.3.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c"}, + {file = "pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a"}, + {file = "pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1"}, + {file = "pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838"}, + {file = "pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250"}, + {file = "pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4"}, + {file = "pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826"}, + {file = "pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523"}, + {file = "pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45"}, + {file = "pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66"}, + {file = "pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b"}, + {file = "pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791"}, + {file = "pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151"}, + {file = "pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c"}, + {file = "pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53"}, + {file = "pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35"}, + {file = "pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908"}, + {file = "pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89"}, + {file = "pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98"}, + {file = "pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084"}, + {file = "pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b"}, + {file = "pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713"}, + {file = "pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8"}, + {file = "pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d"}, + {file = "pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac"}, + {file = "pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c"}, + {file = "pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493"}, + {file = "pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee"}, + {file = "pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5"}, + {file = "pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21"}, + {file = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78"}, + {file = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110"}, + {file = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86"}, + {file = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc"}, + {file = "pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0"}, + {file = "pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593"}, + {file = "pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c"}, + {file = "pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b"}, + {file = "pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6"}, + {file = "pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3"}, + {file = "pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5"}, + {file = "pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec"}, + {file = "pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7"}, + {file = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450"}, + {file = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5"}, + {file = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788"}, + {file = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87"}, + {file = "pandas-2.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c503ba5216814e295f40711470446bc3fd00f0faea8a086cbc688808e26f92a2"}, + {file = "pandas-2.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a637c5cdfa04b6d6e2ecedcb81fc52ffb0fd78ce2ebccc9ea964df9f658de8c8"}, + {file = "pandas-2.3.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:854d00d556406bffe66a4c0802f334c9ad5a96b4f1f868adf036a21b11ef13ff"}, + {file = "pandas-2.3.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bf1f8a81d04ca90e32a0aceb819d34dbd378a98bf923b6398b9a3ec0bf44de29"}, + {file = "pandas-2.3.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23ebd657a4d38268c7dfbdf089fbc31ea709d82e4923c5ffd4fbd5747133ce73"}, + {file = "pandas-2.3.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5554c929ccc317d41a5e3d1234f3be588248e61f08a74dd17c9eabb535777dc9"}, + {file = "pandas-2.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:d3e28b3e83862ccf4d85ff19cf8c20b2ae7e503881711ff2d534dc8f761131aa"}, + {file = "pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow 
(>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "passlib" version = "1.7.4" @@ -1454,6 +1683,74 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pyproj" +version = "3.7.2" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "pyproj-3.7.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2514d61f24c4e0bb9913e2c51487ecdaeca5f8748d8313c933693416ca41d4d5"}, + {file = "pyproj-3.7.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:8693ca3892d82e70de077701ee76dd13d7bca4ae1c9d1e739d72004df015923a"}, + {file = "pyproj-3.7.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5e26484d80fea56273ed1555abaea161e9661d81a6c07815d54b8e883d4ceb25"}, + {file = "pyproj-3.7.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:281cb92847814e8018010c48b4069ff858a30236638631c1a91dd7bfa68f8a8a"}, + {file = "pyproj-3.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9c8577f0b7bb09118ec2e57e3babdc977127dd66326d6c5d755c76b063e6d9dc"}, + {file = "pyproj-3.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a23f59904fac3a5e7364b3aa44d288234af267ca041adb2c2b14a903cd5d3ac5"}, + {file = "pyproj-3.7.2-cp311-cp311-win32.whl", hash = "sha256:f2af4ed34b2cf3e031a2d85b067a3ecbd38df073c567e04b52fa7a0202afde8a"}, + {file = "pyproj-3.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:0b7cb633565129677b2a183c4d807c727d1c736fcb0568a12299383056e67433"}, + {file = "pyproj-3.7.2-cp311-cp311-win_arm64.whl", hash = "sha256:38b08d85e3a38e455625b80e9eb9f78027c8e2649a21dec4df1f9c3525460c71"}, + {file = "pyproj-3.7.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:0a9bb26a6356fb5b033433a6d1b4542158fb71e3c51de49b4c318a1dff3aeaab"}, + {file = 
"pyproj-3.7.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:567caa03021178861fad27fabde87500ec6d2ee173dd32f3e2d9871e40eebd68"}, + {file = "pyproj-3.7.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c203101d1dc3c038a56cff0447acc515dd29d6e14811406ac539c21eed422b2a"}, + {file = "pyproj-3.7.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1edc34266c0c23ced85f95a1ee8b47c9035eae6aca5b6b340327250e8e281630"}, + {file = "pyproj-3.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa9f26c21bc0e2dc3d224cb1eb4020cf23e76af179a7c66fea49b828611e4260"}, + {file = "pyproj-3.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9428b318530625cb389b9ddc9c51251e172808a4af79b82809376daaeabe5e9"}, + {file = "pyproj-3.7.2-cp312-cp312-win32.whl", hash = "sha256:b3d99ed57d319da042f175f4554fc7038aa4bcecc4ac89e217e350346b742c9d"}, + {file = "pyproj-3.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:11614a054cd86a2ed968a657d00987a86eeb91fdcbd9ad3310478685dc14a128"}, + {file = "pyproj-3.7.2-cp312-cp312-win_arm64.whl", hash = "sha256:509a146d1398bafe4f53273398c3bb0b4732535065fa995270e52a9d3676bca3"}, + {file = "pyproj-3.7.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:19466e529b1b15eeefdf8ff26b06fa745856c044f2f77bf0edbae94078c1dfa1"}, + {file = "pyproj-3.7.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:c79b9b84c4a626c5dc324c0d666be0bfcebd99f7538d66e8898c2444221b3da7"}, + {file = "pyproj-3.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ceecf374cacca317bc09e165db38ac548ee3cad07c3609442bd70311c59c21aa"}, + {file = "pyproj-3.7.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5141a538ffdbe4bfd157421828bb2e07123a90a7a2d6f30fa1462abcfb5ce681"}, + {file = "pyproj-3.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f000841e98ea99acbb7b8ca168d67773b0191de95187228a16110245c5d954d5"}, + {file = "pyproj-3.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8115faf2597f281a42ab608ceac346b4eb1383d3b45ab474fd37341c4bf82a67"}, + {file = "pyproj-3.7.2-cp313-cp313-win32.whl", hash = "sha256:f18c0579dd6be00b970cb1a6719197fceecc407515bab37da0066f0184aafdf3"}, + {file = "pyproj-3.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:bb41c29d5f60854b1075853fe80c58950b398d4ebb404eb532536ac8d2834ed7"}, + {file = "pyproj-3.7.2-cp313-cp313-win_arm64.whl", hash = "sha256:2b617d573be4118c11cd96b8891a0b7f65778fa7733ed8ecdb297a447d439100"}, + {file = "pyproj-3.7.2-cp313-cp313t-macosx_13_0_x86_64.whl", hash = "sha256:d27b48f0e81beeaa2b4d60c516c3a1cfbb0c7ff6ef71256d8e9c07792f735279"}, + {file = "pyproj-3.7.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:55a3610d75023c7b1c6e583e48ef8f62918e85a2ae81300569d9f104d6684bb6"}, + {file = "pyproj-3.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8d7349182fa622696787cc9e195508d2a41a64765da9b8a6bee846702b9e6220"}, + {file = "pyproj-3.7.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:d230b186eb876ed4f29a7c5ee310144c3a0e44e89e55f65fb3607e13f6db337c"}, + {file = "pyproj-3.7.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:237499c7862c578d0369e2b8ac56eec550e391a025ff70e2af8417139dabb41c"}, + {file = "pyproj-3.7.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8c225f5978abd506fd9a78eaaf794435e823c9156091cabaab5374efb29d7f69"}, + {file = "pyproj-3.7.2-cp313-cp313t-win32.whl", hash = "sha256:2da731876d27639ff9d2d81c151f6ab90a1546455fabd93368e753047be344a2"}, + {file = "pyproj-3.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f54d91ae18dd23b6c0ab48126d446820e725419da10617d86a1b69ada6d881d3"}, + {file 
= "pyproj-3.7.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fc52ba896cfc3214dc9f9ca3c0677a623e8fdd096b257c14a31e719d21ff3fdd"}, + {file = "pyproj-3.7.2-cp314-cp314-macosx_13_0_x86_64.whl", hash = "sha256:2aaa328605ace41db050d06bac1adc11f01b71fe95c18661497763116c3a0f02"}, + {file = "pyproj-3.7.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:35dccbce8201313c596a970fde90e33605248b66272595c061b511c8100ccc08"}, + {file = "pyproj-3.7.2-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:25b0b7cb0042444c29a164b993c45c1b8013d6c48baa61dc1160d834a277e83b"}, + {file = "pyproj-3.7.2-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:85def3a6388e9ba51f964619aa002a9d2098e77c6454ff47773bb68871024281"}, + {file = "pyproj-3.7.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b1bccefec3875ab81eabf49059e2b2ea77362c178b66fd3528c3e4df242f1516"}, + {file = "pyproj-3.7.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d5371ca114d6990b675247355a801925814eca53e6c4b2f1b5c0a956336ee36e"}, + {file = "pyproj-3.7.2-cp314-cp314-win32.whl", hash = "sha256:77f066626030f41be543274f5ac79f2a511fe89860ecd0914f22131b40a0ec25"}, + {file = "pyproj-3.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:5a964da1696b8522806f4276ab04ccfff8f9eb95133a92a25900697609d40112"}, + {file = "pyproj-3.7.2-cp314-cp314-win_arm64.whl", hash = "sha256:e258ab4dbd3cf627809067c0ba8f9884ea76c8e5999d039fb37a1619c6c3e1f6"}, + {file = "pyproj-3.7.2-cp314-cp314t-macosx_13_0_x86_64.whl", hash = "sha256:bbbac2f930c6d266f70ec75df35ef851d96fdb3701c674f42fd23a9314573b37"}, + {file = "pyproj-3.7.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:b7544e0a3d6339dc9151e9c8f3ea62a936ab7cc446a806ec448bbe86aebb979b"}, + {file = "pyproj-3.7.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:f7f5133dca4c703e8acadf6f30bc567d39a42c6af321e7f81975c2518f3ed357"}, + {file = "pyproj-3.7.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5aff3343038d7426aa5076f07feb88065f50e0502d1b0d7c22ddfdd2c75a3f81"}, + {file = "pyproj-3.7.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b0552178c61f2ac1c820d087e8ba6e62b29442debddbb09d51c4bf8acc84d888"}, + {file = "pyproj-3.7.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:47d87db2d2c436c5fd0409b34d70bb6cdb875cca2ebe7a9d1c442367b0ab8d59"}, + {file = "pyproj-3.7.2-cp314-cp314t-win32.whl", hash = "sha256:c9b6f1d8ad3e80a0ee0903a778b6ece7dca1d1d40f6d114ae01bc8ddbad971aa"}, + {file = "pyproj-3.7.2-cp314-cp314t-win_amd64.whl", hash = "sha256:1914e29e27933ba6f9822663ee0600f169014a2859f851c054c88cf5ea8a333c"}, + {file = "pyproj-3.7.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d9d25bae416a24397e0d85739f84d323b55f6511e45a522dd7d7eae70d10c7e4"}, + {file = "pyproj-3.7.2.tar.gz", hash = "sha256:39a0cf1ecc7e282d1d30f36594ebd55c9fae1fda8a2622cee5d100430628f88c"}, +] + +[package.dependencies] +certifi = "*" + [[package]] name = "pytest" version = "8.3.5" @@ -1586,6 +1883,18 @@ files = [ [package.extras] dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +[[package]] +name = "pytz" +version = "2025.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = 
"pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1821,6 +2130,80 @@ statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] unleash = ["UnleashClient (>=6.0.1)"] +[[package]] +name = "shapely" +version = "2.1.2" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "shapely-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ae48c236c0324b4e139bea88a306a04ca630f49be66741b340729d380d8f52f"}, + {file = "shapely-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eba6710407f1daa8e7602c347dfc94adc02205ec27ed956346190d66579eb9ea"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef4a456cc8b7b3d50ccec29642aa4aeda959e9da2fe9540a92754770d5f0cf1f"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e38a190442aacc67ff9f75ce60aec04893041f16f97d242209106d502486a142"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:40d784101f5d06a1fd30b55fc11ea58a61be23f930d934d86f19a180909908a4"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f6cd5819c50d9bcf921882784586aab34a4bd53e7553e175dece6db513a6f0"}, + {file = "shapely-2.1.2-cp310-cp310-win32.whl", hash = "sha256:fe9627c39c59e553c90f5bc3128252cb85dc3b3be8189710666d2f8bc3a5503e"}, + {file = "shapely-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:1d0bfb4b8f661b3b4ec3565fa36c340bfb1cda82087199711f86a88647d26b2f"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2"}, + {file = "shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6"}, + {file = "shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d"}, + {file = "shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454"}, + {file = "shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd"}, + {file = "shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350"}, + {file = "shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b"}, + {file = "shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801"}, + {file = "shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99"}, + {file = "shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf"}, + {file = "shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf"}, + {file = 
"shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4"}, + {file = "shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc"}, + {file = "shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566"}, + {file = "shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0"}, + {file = "shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735"}, + {file = "shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9"}, + {file = "shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9"}, +] + +[package.dependencies] +numpy = ">=1.21" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov", "scipy-doctest"] + [[package]] name = "six" version = "1.17.0" @@ -2054,6 +2437,18 @@ files = [ [package.dependencies] typing-extensions = ">=4.12.0" +[[package]] +name = "tzdata" +version = "2025.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1"}, + {file = "tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7"}, +] + [[package]] name = "urllib3" version = "2.6.2" @@ -2074,14 +2469,14 @@ zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "uvicorn" -version = "0.38.0" +version = "0.40.0" description = "The lightning-fast ASGI server." 
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
 groups = ["main"]
 files = [
-    {file = "uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02"},
-    {file = "uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d"},
+    {file = "uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee"},
+    {file = "uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea"},
 ]
 
 [package.dependencies]
@@ -2115,4 +2510,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.11"
-content-hash = "3f7fd812d4b7a4a4b0abde381dcaafcecef23514d717879c83510f2f2507d0ba"
+content-hash = "aa48b3d2633da3dff397471f72c63e0bbce90edde9c7a9235101518e549770b6"

From 45e5cbb87081f7ca4c830ba92bd0aaa63c574dd8 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Mon, 22 Dec 2025 17:54:09 +0100
Subject: [PATCH 12/55] update loc

---
 src/app/api/api_v1/endpoints/detections.py | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 0c46b05c..2cb9d65c 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -43,7 +43,7 @@
     DetectionSequence,
     DetectionUrl,
 )
-from app.schemas.alerts import AlertCreate
+from app.schemas.alerts import AlertCreate, AlertUpdate
 from app.schemas.login import TokenPayload
 from app.schemas.sequences import SequenceUpdate
 from app.services.overlap import compute_overlap
@@ -129,12 +129,25 @@ async def _attach_sequence_to_alert(
 
     for g in groups:
         g_tuple = tuple(g)
+        location = group_locations.get(g_tuple)
+        start_at = min(seq_by_id[int(sid)].started_at for sid in g_tuple if int(sid) in seq_by_id)
         existing_alert_ids = {aid for sid in g_tuple for aid in mapping.get(int(sid), set())}
         if existing_alert_ids:
             target_alert_id = min(existing_alert_ids)
+            # If we now have a location and the alert is missing it (or start_at can be improved), update it
+            if isinstance(location, tuple):
+                current_alert = await alerts.get(target_alert_id, strict=True)
+                new_start_at = min(start_at, current_alert.start_at) if current_alert.start_at else start_at
+                if (
+                    current_alert.lat is None
+                    or current_alert.lon is None
+                    or (current_alert.start_at is None or new_start_at < current_alert.start_at)
+                ):
+                    await alerts.update(
+                        target_alert_id,
+                        AlertUpdate(lat=location[0], lon=location[1], start_at=new_start_at),
+                    )
         else:
-            location = group_locations.get(g_tuple)
-            start_at = min(seq_by_id[int(sid)].started_at for sid in g_tuple if int(sid) in seq_by_id)
             alert = await alerts.create(
                 AlertCreate(
                     organization_id=camera.organization_id,

From f8523c44b25af6e9e6dbbb3955ec85b42a45f421 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 10:26:54 +0100
Subject: [PATCH 13/55] error management

---
 src/app/services/overlap.py | 40 ++++++++++++++++++++++++++++++-------
 1 file changed, 33 insertions(+), 7 deletions(-)

diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py
index 54b30f94..0c20f5e8 100644
--- a/src/app/services/overlap.py
+++ b/src/app/services/overlap.py
@@ -7,6 +7,7 @@
 from __future__ import annotations
 
 import itertools
+import logging
 from collections import defaultdict
 from math import atan2, cos, radians, sin, sqrt
 from typing import Dict, List, Optional, Tuple
@@ -21,6 +22,8 @@
 from shapely.geometry.base import BaseGeometry
 from shapely.ops import transform as shapely_transform  # type: ignore
 
+logger = logging.getLogger(__name__)
+
 
 def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
     """
@@ -302,9 +305,13 @@ def compute_overlap(
         return df
 
     # Precompute cones in Web Mercator
-    projected_cones: Dict[int, Polygon] = {
-        int(row["id"]): get_projected_cone(row, r_km, r_min_km) for _, row in df_valid.iterrows()
-    }
+    projected_cones: Dict[int, Polygon] = {}
+    for _, row in df_valid.iterrows():
+        sid = int(row["id"])
+        try:
+            projected_cones[sid] = get_projected_cone(row, r_km, r_min_km)
+        except Exception as exc:  # noqa: BLE001
+            logger.warning("Failed to build cone for sequence %s: %s", sid, exc)
 
     # Phase 1, build overlap graph gated by time overlap
     ids = df_valid["id"].astype(int).tolist()
@@ -334,15 +341,34 @@ def compute_overlap(
 
     # Per group localization, median of pair barycenters for robustness
     def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, float]]:
        if len(seq_tuple) < 2:
-            return None
+            # Fallback: use cone center as a proxy
+            sid = seq_tuple[0]
+            poly = projected_cones.get(sid)
+            return get_centroid_latlon(poly) if poly is not None else None
         pts: List[Tuple[float, float]] = []
         for i, j in itertools.combinations(seq_tuple, 2):
-            inter = projected_cones[i].intersection(projected_cones[j])
+            gi = projected_cones.get(i)
+            gj = projected_cones.get(j)
+            if gi is None or gj is None:
+                continue
+            inter = gi.intersection(gj)
             if inter.is_empty or inter.area <= 0:
                 continue
             pts.append(get_centroid_latlon(inter))
         if not pts:
-            return None
+            # No intersections: use centroid of available cones as best-effort location
+            polys = [projected_cones.get(sid) for sid in seq_tuple]
+            polys = [p for p in polys if p is not None]
+            if not polys:
+                return None
+            try:
+                merged = polys[0]
+                for p in polys[1:]:
+                    merged = merged.union(p)
+                return get_centroid_latlon(merged)
+            except Exception as exc:  # noqa: BLE001
+                logger.warning("Failed fallback centroid for group %s: %s", seq_tuple, exc)
+                return None
         lats, lons = zip(*pts)
         return float(np.median(lats)), float(np.median(lons))
@@ -362,4 +388,4 @@ def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, fl
 
     df["event_groups"] = df["id"].astype(int).map(lambda sid: seq_to_groups.get(sid, [(sid,)]))
     df["event_smoke_locations"] = df["id"].astype(int).map(lambda sid: seq_to_smokes.get(sid, []))
-    return df
\ No newline at end of file
+    return df

From 4fe12f84c44b25af6e9e6dbbb3955ec85b42a45f421 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 11:23:11 +0100
Subject: [PATCH 14/55] fix on seq case

---
 src/app/services/overlap.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py
index 0c20f5e8..4ad5fbbf 100644
--- a/src/app/services/overlap.py
+++ b/src/app/services/overlap.py
@@ -341,10 +341,7 @@ def compute_overlap(
     # Per group localization, median of pair barycenters for robustness
     def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, float]]:
         if len(seq_tuple) < 2:
-            # Fallback: use cone center as a proxy
-            sid = seq_tuple[0]
-            poly = projected_cones.get(sid)
-            return get_centroid_latlon(poly) if poly is not None else None
+            return None
         pts: List[Tuple[float, float]] = []
         for i, j in itertools.combinations(seq_tuple, 2):
             gi = projected_cones.get(i)

From 1bb2d2048435c55ec32157fedcea288d800774e4 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 11:32:26 +0100
Subject: [PATCH 15/55] use started_at and last_seen_at

---
 src/app/api/api_v1/endpoints/alerts.py     |  6 +++---
 src/app/api/api_v1/endpoints/detections.py | 12 ++++++++----
 src/app/models.py                          |  4 ++--
 src/app/schemas/alerts.py                  |  7 ++++---
 4 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
index c9d23fa6..951a18da 100644
--- a/src/app/api/api_v1/endpoints/alerts.py
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -86,7 +86,7 @@ async def fetch_latest_unlabeled_alerts(
         .where(Alert.organization_id == token_payload.organization_id)
         .where(Sequence.last_seen_at > datetime.utcnow() - timedelta(hours=24))
         .where(Sequence.is_wildfire.is_(None))  # type: ignore[union-attr]
-        .order_by(Alert.start_at.desc().nullslast())  # type: ignore[attr-defined]
+        .order_by(Alert.started_at.desc())  # type: ignore[attr-defined]
         .limit(15)
     )
     alerts_res = await session.exec(alerts_stmt)
@@ -106,8 +106,8 @@ async def fetch_alerts_from_date(
     alerts_stmt = (
         select(Alert)
         .where(Alert.organization_id == token_payload.organization_id)
-        .where(func.date(Alert.start_at) == from_date)
-        .order_by(Alert.start_at.desc().nullslast())  # type: ignore[attr-defined]
+        .where(func.date(Alert.started_at) == from_date)
+        .order_by(Alert.started_at.desc())  # type: ignore[attr-defined]
         .limit(limit)
         .offset(offset)
     )
diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 2cb9d65c..1a6a41d4 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -131,21 +131,24 @@ async def _attach_sequence_to_alert(
         g_tuple = tuple(g)
         location = group_locations.get(g_tuple)
         start_at = min(seq_by_id[int(sid)].started_at for sid in g_tuple if int(sid) in seq_by_id)
+        last_seen_at = max(seq_by_id[int(sid)].last_seen_at for sid in g_tuple if int(sid) in seq_by_id)
         existing_alert_ids = {aid for sid in g_tuple for aid in mapping.get(int(sid), set())}
         if existing_alert_ids:
             target_alert_id = min(existing_alert_ids)
             # If we now have a location and the alert is missing it (or start_at can be improved), update it
             if isinstance(location, tuple):
                 current_alert = await alerts.get(target_alert_id, strict=True)
-                new_start_at = min(start_at, current_alert.start_at) if current_alert.start_at else start_at
+                new_start_at = min(start_at, current_alert.started_at) if current_alert.started_at else start_at
+                new_last_seen = max(last_seen_at, current_alert.last_seen_at) if current_alert.last_seen_at else last_seen_at
                 if (
                     current_alert.lat is None
                     or current_alert.lon is None
-                    or (current_alert.start_at is None or new_start_at < current_alert.start_at)
+                    or (current_alert.started_at is None or new_start_at < current_alert.started_at)
+                    or (current_alert.last_seen_at is None or new_last_seen > current_alert.last_seen_at)
                 ):
                     await alerts.update(
                         target_alert_id,
-                        AlertUpdate(lat=location[0], lon=location[1], start_at=new_start_at),
+                        AlertUpdate(lat=location[0], lon=location[1], started_at=new_start_at, last_seen_at=new_last_seen),
                     )
         else:
             alert = await alerts.create(
@@ -153,7 +156,8 @@ async def _attach_sequence_to_alert(
                     organization_id=camera.organization_id,
                     lat=location[0] if isinstance(location, tuple) else None,
                     lon=location[1] if isinstance(location, tuple) else None,
-                    start_at=start_at,
+                    started_at=start_at,
+                    last_seen_at=last_seen_at,
                 )
             )
             target_alert_id = alert.id
b/src/app/models.py index 5be83cd5..a6be132c 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ -98,8 +98,8 @@ class Alert(SQLModel, table=True): organization_id: int = Field(..., foreign_key="organizations.id", nullable=False) lat: Union[float, None] = Field(default=None) lon: Union[float, None] = Field(default=None) - start_at: Union[datetime, None] = Field(default=None, nullable=True) - created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False) + started_at: datetime = Field(..., nullable=False) + last_seen_at: datetime = Field(..., nullable=False) class AlertSequence(SQLModel, table=True): diff --git a/src/app/schemas/alerts.py b/src/app/schemas/alerts.py index bba0d196..6aa5da5c 100644 --- a/src/app/schemas/alerts.py +++ b/src/app/schemas/alerts.py @@ -15,16 +15,17 @@ class AlertCreate(BaseModel): organization_id: int = Field(..., gt=0) lat: Optional[float] = None lon: Optional[float] = None - start_at: Optional[datetime] = None + started_at: datetime + last_seen_at: datetime class AlertUpdate(BaseModel): organization_id: Optional[int] = Field(None, gt=0) lat: Optional[float] = None lon: Optional[float] = None - start_at: Optional[datetime] = None + started_at: Optional[datetime] = None + last_seen_at: Optional[datetime] = None class AlertRead(AlertCreate): id: int - created_at: datetime From 2527baa9094ed750509a7ced53f172ea69136758 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 11:49:50 +0100 Subject: [PATCH 16/55] clean output --- src/app/api/api_v1/endpoints/alerts.py | 13 +++++++------ src/app/api/api_v1/endpoints/detections.py | 11 ++++++----- src/app/api/api_v1/endpoints/sequences.py | 8 ++++---- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py index 951a18da..cfb8421e 100644 --- a/src/app/api/api_v1/endpoints/alerts.py +++ b/src/app/api/api_v1/endpoints/alerts.py @@ -14,6 +14,7 @@ from app.crud import AlertCRUD, SequenceCRUD from app.db import get_session from app.models import Alert, AlertSequence, Sequence, UserRole +from app.schemas.alerts import AlertRead from app.schemas.login import TokenPayload from app.services.telemetry import telemetry_client @@ -30,14 +31,14 @@ async def get_alert( alert_id: int = Path(..., gt=0), alerts: AlertCRUD = Depends(get_alert_crud), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> Alert: +) -> AlertRead: telemetry_client.capture(token_payload.sub, event="alerts-get", properties={"alert_id": alert_id}) alert = cast(Alert, await alerts.get(alert_id, strict=True)) if UserRole.ADMIN not in token_payload.scopes: await verify_org_rights(token_payload.organization_id, alert) - return alert + return AlertRead(**alert.model_dump()) @router.get( @@ -76,7 +77,7 @@ async def fetch_alert_sequences( async def fetch_latest_unlabeled_alerts( session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Alert]: +) -> List[AlertRead]: telemetry_client.capture(token_payload.sub, event="alerts-fetch-latest") alerts_stmt = ( @@ -90,7 +91,7 @@ async def fetch_latest_unlabeled_alerts( .limit(15) ) alerts_res = await session.exec(alerts_stmt) - return alerts_res.unique().all() # unique to deduplicate joins + return [AlertRead(**a.model_dump()) for a in alerts_res.unique().all()] # unique to deduplicate joins @router.get("/all/fromdate", status_code=status.HTTP_200_OK, 
summary="Fetch all the alerts for a specific date") @@ -100,7 +101,7 @@ async def fetch_alerts_from_date( offset: Union[int, None] = Query(0, description="Number of alerts to skip before starting to fetch"), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Alert]: +) -> List[AlertRead]: telemetry_client.capture(token_payload.sub, event="alerts-fetch-from-date") alerts_stmt = ( @@ -112,7 +113,7 @@ async def fetch_alerts_from_date( .offset(offset) ) alerts_res = await session.exec(alerts_stmt) - return alerts_res.all() + return [AlertRead(**a.model_dump()) for a in alerts_res.all()] @router.delete("/{alert_id}", status_code=status.HTTP_200_OK, summary="Delete an alert") diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index 1a6a41d4..02395689 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -40,6 +40,7 @@ BOXES_PATTERN, COMPILED_BOXES_PATTERN, DetectionCreate, + DetectionRead, DetectionSequence, DetectionUrl, ) @@ -288,7 +289,7 @@ async def create_detection( slack_client.notify, org.slack_hook, det.model_dump_json(), url, camera.name ) - return det + return DetectionRead(**det.model_dump()) @router.get("/{detection_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific detection") @@ -307,7 +308,7 @@ async def get_detection( camera = cast(Camera, await cameras.get(detection.camera_id, strict=True)) if token_payload.organization_id != camera.organization_id: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") - return detection + return DetectionRead(**detection.model_dump()) @router.get("/{detection_id}/url", status_code=200) @@ -340,15 +341,15 @@ async def fetch_detections( detections: DetectionCRUD = Depends(get_detection_crud), cameras: CameraCRUD = Depends(get_camera_crud), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Detection]: +) -> List[DetectionRead]: telemetry_client.capture(token_payload.sub, event="detections-fetch") if UserRole.ADMIN in token_payload.scopes: - return [elt for elt in await detections.fetch_all()] + return [DetectionRead(**elt.model_dump()) for elt in await detections.fetch_all()] cameras_list = await cameras.fetch_all(filters=("organization_id", token_payload.organization_id)) camera_ids = [camera.id for camera in cameras_list] - return await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id") + return [DetectionRead(**elt.model_dump()) for elt in await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id")] @router.delete("/{detection_id}", status_code=status.HTTP_200_OK, summary="Delete a detection") diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index e080ac8f..5a22a8e8 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -14,7 +14,7 @@ from app.crud import CameraCRUD, DetectionCRUD, SequenceCRUD from app.db import get_session from app.models import Camera, Detection, Sequence, UserRole -from app.schemas.detections import DetectionSequence, DetectionWithUrl +from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceLabel from app.services.storage import 
s3_service @@ -69,7 +69,7 @@ async def fetch_sequence_detections( bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(camera.organization_id)) return [ DetectionWithUrl( - **elt.__dict__, + **DetectionRead(**elt.model_dump()).model_dump(), url=bucket.get_public_url(elt.bucket_key), ) for elt in await detections.fetch_all( @@ -103,7 +103,7 @@ async def fetch_latest_unlabeled_sequences( .limit(15) ) ).all() - return fetched_sequences + return [Sequence(**elt.model_dump()) for elt in fetched_sequences] @router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the sequences for a specific date") @@ -128,7 +128,7 @@ async def fetch_sequences_from_date( .offset(offset) ) ).all() - return fetched_sequences + return [Sequence(**elt.model_dump()) for elt in fetched_sequences] @router.delete("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Delete a sequence") From 31a39609143f1919f45d4230f5a44da4e9ecff60 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 11:56:06 +0100 Subject: [PATCH 17/55] missing READ --- src/app/api/api_v1/endpoints/sequences.py | 12 ++++++------ src/app/schemas/detections.py | 6 +++++- src/app/schemas/sequences.py | 6 +++++- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 5a22a8e8..28084cf0 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -16,7 +16,7 @@ from app.models import Camera, Detection, Sequence, UserRole from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload -from app.schemas.sequences import SequenceLabel +from app.schemas.sequences import SequenceLabel, SequenceRead from app.services.storage import s3_service from app.services.telemetry import telemetry_client @@ -44,7 +44,7 @@ async def get_sequence( if UserRole.ADMIN not in token_payload.scopes: await verify_org_rights(token_payload.organization_id, sequence.camera_id, cameras) - return sequence + return SequenceRead(**sequence.model_dump()) @router.get( @@ -89,7 +89,7 @@ async def fetch_sequence_detections( async def fetch_latest_unlabeled_sequences( session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Sequence]: +) -> List[SequenceRead]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-latest") camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) @@ -103,7 +103,7 @@ async def fetch_latest_unlabeled_sequences( .limit(15) ) ).all() - return [Sequence(**elt.model_dump()) for elt in fetched_sequences] + return [SequenceRead(**elt.model_dump()) for elt in fetched_sequences] @router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the sequences for a specific date") @@ -113,7 +113,7 @@ async def fetch_sequences_from_date( offset: Union[int, None] = Query(0, description="Number of sequences to skip before starting to fetch"), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Sequence]: +) -> List[SequenceRead]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-from-date") # Limit to cameras in the same organization camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == 
token_payload.organization_id)) @@ -128,7 +128,7 @@ async def fetch_sequences_from_date( .offset(offset) ) ).all() - return [Sequence(**elt.model_dump()) for elt in fetched_sequences] + return [SequenceRead(**elt.model_dump()) for elt in fetched_sequences] @router.delete("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Delete a sequence") diff --git a/src/app/schemas/detections.py b/src/app/schemas/detections.py index ec335033..f31c9fa0 100644 --- a/src/app/schemas/detections.py +++ b/src/app/schemas/detections.py @@ -11,7 +11,7 @@ from app.core.config import settings from app.models import AnnotationType, Detection -__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionUrl"] +__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionUrl", "DetectionRead", "DetectionWithUrl"] class DetectionLabel(BaseModel): @@ -52,6 +52,10 @@ class DetectionUrl(BaseModel): url: str = Field(..., description="temporary URL to access the media content") +class DetectionRead(Detection): + pass + + class DetectionWithUrl(Detection): url: str = Field(..., description="temporary URL to access the media content") diff --git a/src/app/schemas/sequences.py b/src/app/schemas/sequences.py index 30a89692..ae77014c 100644 --- a/src/app/schemas/sequences.py +++ b/src/app/schemas/sequences.py @@ -9,7 +9,7 @@ from app.models import AnnotationType, Sequence -__all__ = ["SequenceLabel", "SequenceUpdate"] +__all__ = ["SequenceLabel", "SequenceRead", "SequenceUpdate"] # Accesses @@ -19,3 +19,7 @@ class SequenceUpdate(BaseModel): class SequenceLabel(BaseModel): is_wildfire: AnnotationType + + +class SequenceRead(Sequence): + pass From e5ab687bd84ab22147c651e431e84013355626bc Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:08:31 +0100 Subject: [PATCH 18/55] add test --- src/tests/endpoints/test_alerts.py | 117 +++++++++++++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 src/tests/endpoints/test_alerts.py diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py new file mode 100644 index 00000000..dae8fdbf --- /dev/null +++ b/src/tests/endpoints/test_alerts.py @@ -0,0 +1,117 @@ +# Copyright (C) 2025, Pyronear. +# +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. 
+ +from datetime import datetime, timedelta +from typing import List, Tuple + +import pytest +from httpx import AsyncClient +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.models import Alert, AlertSequence, Sequence + + +async def _create_alert_with_sequences( + session: AsyncSession, org_id: int, camera_id: int, lat: float, lon: float +) -> Tuple[Alert, List[int]]: + now = datetime.utcnow() + seq_payloads = [ + dict(camera_id=camera_id, pose_id=None, azimuth=180.0, is_wildfire=None, cone_azimuth=163.4, cone_angle=1.0), + dict(camera_id=camera_id, pose_id=None, azimuth=25.0, is_wildfire=None, cone_azimuth=8.3, cone_angle=0.8), + dict(camera_id=camera_id, pose_id=None, azimuth=276.0, is_wildfire=None, cone_azimuth=276.5, cone_angle=3.0), + ] + sequences: List[Sequence] = [] + for idx, payload in enumerate(seq_payloads): + seq = Sequence( + **payload, + started_at=now - timedelta(seconds=10 * (idx + 1)), + last_seen_at=now - timedelta(seconds=idx), + ) + session.add(seq) + sequences.append(seq) + await session.commit() + for seq in sequences: + await session.refresh(seq) + + alert = Alert( + organization_id=org_id, + lat=lat, + lon=lon, + started_at=min(seq.started_at for seq in sequences), + last_seen_at=max(seq.last_seen_at for seq in sequences), + ) + session.add(alert) + await session.commit() + await session.refresh(alert) + + for seq in sequences: + session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id)) + await session.commit() + return alert, [seq.id for seq in sequences] + + +@pytest.mark.asyncio +async def test_get_alert_and_sequences(async_client: AsyncClient, detection_session: AsyncSession): + alert, seq_ids = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + + resp = await async_client.get(f"/alerts/{alert.id}", headers=auth) + assert resp.status_code == 200, resp.text + assert resp.json()["id"] == alert.id + assert resp.json()["lat"] == pytest.approx(alert.lat) + assert resp.json()["lon"] == pytest.approx(alert.lon) + assert resp.json()["started_at"] == alert.started_at.isoformat() + assert resp.json()["last_seen_at"] == alert.last_seen_at.isoformat() + + resp = await async_client.get(f"/alerts/{alert.id}/sequences?limit=5&desc=true", headers=auth) + assert resp.status_code == 200, resp.text + returned = resp.json() + last_seen_times = [item["last_seen_at"] for item in returned] + assert last_seen_times == sorted(last_seen_times, reverse=True) + + +@pytest.mark.asyncio +async def test_alerts_unlabeled_latest(async_client: AsyncClient, detection_session: AsyncSession): + alert, _ = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + resp = await async_client.get("/alerts/unlabeled/latest", headers=auth) + assert resp.status_code == 200, resp.text + payload = resp.json() + assert any(item["id"] == alert.id for item in payload) + returned = next(item for item in payload if item["id"] == alert.id) + assert returned["lat"] == pytest.approx(alert.lat) + assert returned["lon"] == pytest.approx(alert.lon) + assert returned["started_at"] == alert.started_at.isoformat() + assert returned["last_seen_at"] == alert.last_seen_at.isoformat() + + 
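+# NB: pytest.get_token and pytest.user_table are assumed to be attributes
+# registered on the pytest namespace by the test conftest; the resulting
+# headers authenticate the request as a user of the seeded organization.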
+@pytest.mark.asyncio +async def test_alerts_from_date(async_client: AsyncClient, detection_session: AsyncSession): + alert, _ = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + date_str = alert.started_at.date().isoformat() + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + resp = await async_client.get(f"/alerts/all/fromdate?from_date={date_str}", headers=auth) + assert resp.status_code == 200, resp.text + assert any(item["id"] == alert.id for item in resp.json()) + + # Ensure order is by started_at desc + returned = resp.json() + started_times = [item["started_at"] for item in returned] + assert started_times == sorted(started_times, reverse=True) From 92510a87a9dee3c89e7c0062901d65cdd6f9df35 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:24:30 +0100 Subject: [PATCH 19/55] fix style --- src/app/api/api_v1/endpoints/alerts.py | 13 +++-- src/app/api/api_v1/endpoints/detections.py | 55 ++++++++++++---------- src/app/api/api_v1/router.py | 12 ++++- src/app/schemas/detections.py | 2 +- src/app/services/overlap.py | 2 +- src/tests/endpoints/test_alerts.py | 29 ++++++++++-- 6 files changed, 74 insertions(+), 39 deletions(-) diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py index cfb8421e..dc3829df 100644 --- a/src/app/api/api_v1/endpoints/alerts.py +++ b/src/app/api/api_v1/endpoints/alerts.py @@ -10,8 +10,8 @@ from sqlmodel import delete, func, select from sqlmodel.ext.asyncio.session import AsyncSession -from app.api.dependencies import get_alert_crud, get_jwt, get_sequence_crud -from app.crud import AlertCRUD, SequenceCRUD +from app.api.dependencies import get_alert_crud, get_jwt +from app.crud import AlertCRUD from app.db import get_session from app.models import Alert, AlertSequence, Sequence, UserRole from app.schemas.alerts import AlertRead @@ -21,7 +21,7 @@ router = APIRouter() -async def verify_org_rights(organization_id: int, alert: Alert) -> None: +def verify_org_rights(organization_id: int, alert: Alert) -> None: if organization_id != alert.organization_id: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") @@ -36,7 +36,7 @@ async def get_alert( alert = cast(Alert, await alerts.get(alert_id, strict=True)) if UserRole.ADMIN not in token_payload.scopes: - await verify_org_rights(token_payload.organization_id, alert) + verify_org_rights(token_payload.organization_id, alert) return AlertRead(**alert.model_dump()) @@ -49,14 +49,13 @@ async def fetch_alert_sequences( limit: int = Query(10, description="Maximum number of sequences to fetch", ge=1, le=100), desc: bool = Query(True, description="Whether to order the sequences by last_seen_at in descending order"), alerts: AlertCRUD = Depends(get_alert_crud), - sequences: SequenceCRUD = Depends(get_sequence_crud), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), ) -> List[Sequence]: telemetry_client.capture(token_payload.sub, event="alerts-sequences-get", properties={"alert_id": alert_id}) alert = cast(Alert, await alerts.get(alert_id, strict=True)) if UserRole.ADMIN not in token_payload.scopes: - await verify_org_rights(token_payload.organization_id, alert) + verify_org_rights(token_payload.organization_id, alert) stmt = ( select(Sequence) @@ -127,7 +126,7 @@ async def delete_alert( # Ensure 
alert exists and org is valid alert = cast(Alert, await alerts.get(alert_id, strict=True)) - await verify_org_rights(token_payload.organization_id, alert) + verify_org_rights(token_payload.organization_id, alert) # Delete associations await session.exec(delete(AlertSequence).where(AlertSequence.alert_id == alert_id)) diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index 02395689..f635c4a8 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -4,10 +4,10 @@ # See LICENSE or go to for full license details. -import itertools from datetime import datetime, timedelta from typing import List, Optional, cast +import pandas as pd from fastapi import ( APIRouter, BackgroundTasks, @@ -20,22 +20,22 @@ UploadFile, status, ) -import pandas as pd from sqlmodel import select from app.api.dependencies import ( dispatch_webhook, + get_alert_crud, get_camera_crud, get_detection_crud, get_jwt, get_organization_crud, - get_alert_crud, get_sequence_crud, get_webhook_crud, ) from app.core.config import settings from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD from app.models import AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole +from app.schemas.alerts import AlertCreate, AlertUpdate from app.schemas.detections import ( BOXES_PATTERN, COMPILED_BOXES_PATTERN, @@ -44,12 +44,11 @@ DetectionSequence, DetectionUrl, ) -from app.schemas.alerts import AlertCreate, AlertUpdate from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceUpdate +from app.services.cones import resolve_cone from app.services.overlap import compute_overlap from app.services.slack import slack_client -from app.services.cones import resolve_cone from app.services.storage import s3_service, upload_file from app.services.telegram import telegram_client from app.services.telemetry import telemetry_client @@ -74,7 +73,11 @@ async def _attach_sequence_to_alert( # Fetch recent sequences for the organization based on recency of last_seen_at recent_sequences = await sequences.fetch_all( in_pair=("camera_id", list(camera_by_id.keys())), - inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS)), + inequality_pair=( + "last_seen_at", + ">", + datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS), + ), ) # Ensure the newly created sequence is present @@ -87,18 +90,16 @@ async def _attach_sequence_to_alert( cam = camera_by_id.get(seq.camera_id) if cam is None or seq.cone_azimuth is None or seq.cone_angle is None: continue - records.append( - { - "id": int(seq.id), - "lat": float(cam.lat), - "lon": float(cam.lon), - "cone_azimuth": float(seq.cone_azimuth), - "cone_angle": float(seq.cone_angle), - "is_wildfire": seq.is_wildfire, - "started_at": seq.started_at, - "last_seen_at": seq.last_seen_at, - } - ) + records.append({ + "id": int(seq.id), + "lat": float(cam.lat), + "lon": float(cam.lon), + "cone_azimuth": float(seq.cone_azimuth), + "cone_angle": float(seq.cone_angle), + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + }) if not records: return @@ -118,10 +119,7 @@ async def _attach_sequence_to_alert( session = sequences.session mapping: dict[int, set[int]] = {} if seq_ids: - stmt = ( - select(AlertSequence.alert_id, AlertSequence.sequence_id) - .where(AlertSequence.sequence_id.in_(seq_ids)) - ) + stmt = 
select(AlertSequence.alert_id, AlertSequence.sequence_id).where(AlertSequence.sequence_id.in_(seq_ids)) res = await session.exec(stmt) # type: ignore[arg-type] for aid, sid in res: mapping.setdefault(int(sid), set()).add(int(aid)) @@ -140,7 +138,9 @@ async def _attach_sequence_to_alert( if isinstance(location, tuple): current_alert = await alerts.get(target_alert_id, strict=True) new_start_at = min(start_at, current_alert.started_at) if current_alert.started_at else start_at - new_last_seen = max(last_seen_at, current_alert.last_seen_at) if current_alert.last_seen_at else last_seen_at + new_last_seen = ( + max(last_seen_at, current_alert.last_seen_at) if current_alert.last_seen_at else last_seen_at + ) if ( current_alert.lat is None or current_alert.lon is None @@ -149,7 +149,9 @@ async def _attach_sequence_to_alert( ): await alerts.update( target_alert_id, - AlertUpdate(lat=location[0], lon=location[1], started_at=new_start_at, last_seen_at=new_last_seen), + AlertUpdate( + lat=location[0], lon=location[1], started_at=new_start_at, last_seen_at=new_last_seen + ), ) else: alert = await alerts.create( @@ -349,7 +351,10 @@ async def fetch_detections( cameras_list = await cameras.fetch_all(filters=("organization_id", token_payload.organization_id)) camera_ids = [camera.id for camera in cameras_list] - return [DetectionRead(**elt.model_dump()) for elt in await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id")] + return [ + DetectionRead(**elt.model_dump()) + for elt in await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id") + ] @router.delete("/{detection_id}", status_code=status.HTTP_200_OK, summary="Delete a detection") diff --git a/src/app/api/api_v1/router.py b/src/app/api/api_v1/router.py index 979b009b..3510d26f 100644 --- a/src/app/api/api_v1/router.py +++ b/src/app/api/api_v1/router.py @@ -5,7 +5,17 @@ from fastapi import APIRouter -from app.api.api_v1.endpoints import alerts, cameras, detections, login, organizations, poses, sequences, users, webhooks +from app.api.api_v1.endpoints import ( + alerts, + cameras, + detections, + login, + organizations, + poses, + sequences, + users, + webhooks, +) api_router = APIRouter(redirect_slashes=True) api_router.include_router(login.router, prefix="/login", tags=["login"]) diff --git a/src/app/schemas/detections.py b/src/app/schemas/detections.py index f31c9fa0..508207ce 100644 --- a/src/app/schemas/detections.py +++ b/src/app/schemas/detections.py @@ -11,7 +11,7 @@ from app.core.config import settings from app.models import AnnotationType, Detection -__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionUrl", "DetectionRead", "DetectionWithUrl"] +__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionRead", "DetectionUrl", "DetectionWithUrl"] class DetectionLabel(BaseModel): diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py index 4ad5fbbf..0e5e886e 100644 --- a/src/app/services/overlap.py +++ b/src/app/services/overlap.py @@ -366,7 +366,7 @@ def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, fl except Exception as exc: # noqa: BLE001 logger.warning("Failed fallback centroid for group %s: %s", seq_tuple, exc) return None - lats, lons = zip(*pts) + lats, lons = zip(*pts, strict=False) return float(np.median(lats)), float(np.median(lons)) group_to_smoke: Dict[Tuple[int, ...], Optional[Tuple[float, float]]] = { diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py index dae8fdbf..fb83f706 100644 --- 
a/src/tests/endpoints/test_alerts.py +++ b/src/tests/endpoints/test_alerts.py @@ -18,9 +18,30 @@ async def _create_alert_with_sequences( ) -> Tuple[Alert, List[int]]: now = datetime.utcnow() seq_payloads = [ - dict(camera_id=camera_id, pose_id=None, azimuth=180.0, is_wildfire=None, cone_azimuth=163.4, cone_angle=1.0), - dict(camera_id=camera_id, pose_id=None, azimuth=25.0, is_wildfire=None, cone_azimuth=8.3, cone_angle=0.8), - dict(camera_id=camera_id, pose_id=None, azimuth=276.0, is_wildfire=None, cone_azimuth=276.5, cone_angle=3.0), + { + "camera_id": camera_id, + "pose_id": None, + "azimuth": 180.0, + "is_wildfire": None, + "cone_azimuth": 163.4, + "cone_angle": 1.0, + }, + { + "camera_id": camera_id, + "pose_id": None, + "azimuth": 25.0, + "is_wildfire": None, + "cone_azimuth": 8.3, + "cone_angle": 0.8, + }, + { + "camera_id": camera_id, + "pose_id": None, + "azimuth": 276.0, + "is_wildfire": None, + "cone_azimuth": 276.5, + "cone_angle": 3.0, + }, ] sequences: List[Sequence] = [] for idx, payload in enumerate(seq_payloads): @@ -54,7 +75,7 @@ async def _create_alert_with_sequences( @pytest.mark.asyncio async def test_get_alert_and_sequences(async_client: AsyncClient, detection_session: AsyncSession): - alert, seq_ids = await _create_alert_with_sequences( + alert, _seq_ids = await _create_alert_with_sequences( detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 ) From 0140d29d675fd721b5ade8491474a7584d45e561 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:28:41 +0100 Subject: [PATCH 20/55] test overlap --- src/tests/services/test_overlap.py | 53 ++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 src/tests/services/test_overlap.py diff --git a/src/tests/services/test_overlap.py b/src/tests/services/test_overlap.py new file mode 100644 index 00000000..aa656053 --- /dev/null +++ b/src/tests/services/test_overlap.py @@ -0,0 +1,53 @@ +# Copyright (C) 2025, Pyronear. +# +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. 
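+
+# NB (expected contract, as exercised below): compute_overlap takes a DataFrame
+# with one row per sequence (id, camera lat/lon, cone_azimuth, cone_angle,
+# annotation state, timestamps) and returns it with two extra columns:
+# "event_groups", tuples of sequence ids whose projected cones intersect, and
+# "event_smoke_locations", one estimated (lat, lon) per group, or None when no
+# location can be triangulated (e.g. an isolated singleton). Roughly:
+#
+#   df = compute_overlap(pd.DataFrame.from_records(records))
+#   df["event_groups"].iloc[0]           # e.g. [(1, 2, 3)]
+#   df["event_smoke_locations"].iloc[0]  # e.g. [(48.4, 2.7)] or [None]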
+ +from datetime import datetime, timedelta + +import pandas as pd + +from app.services.overlap import compute_overlap + + +def _make_sequence( + id_: int, + lat: float, + lon: float, + cone_azimuth: float, + cone_angle: float, + started_at: datetime, + last_seen_at: datetime, + is_wildfire=None, +): + return { + "id": id_, + "lat": lat, + "lon": lon, + "cone_azimuth": cone_azimuth, + "cone_angle": cone_angle, + "is_wildfire": is_wildfire, + "started_at": started_at, + "last_seen_at": last_seen_at, + } + + +def test_compute_overlap_groups_and_locations() -> None: + now = datetime.utcnow() + seqs = [ + _make_sequence(1, 48.3792, 2.8208, 276.5, 3.0, now - timedelta(seconds=9), now - timedelta(seconds=1)), + _make_sequence(2, 48.2605, 2.7064, 8.3, 0.8, now - timedelta(seconds=8), now - timedelta(seconds=2)), + _make_sequence(3, 48.4267, 2.7109, 163.4, 1.0, now - timedelta(seconds=7), now - timedelta(seconds=3)), + _make_sequence(4, 10.0, 10.0, 90.0, 1.0, now - timedelta(seconds=6), now - timedelta(seconds=4)), + ] + df = compute_overlap(pd.DataFrame.from_records(seqs)) + + row1 = df[df["id"] == 1].iloc[0] + row4 = df[df["id"] == 4].iloc[0] + + assert [(1, 2, 3)] == row1["event_groups"] + assert row1["event_smoke_locations"][0] is not None + + # Non-overlapping singleton keeps its own group and no location + assert [(4,)] == row4["event_groups"] + assert row4["event_smoke_locations"] == [None] From f1a4236621dcb1318af4ec8cb4029c2ab58b089c Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:32:51 +0100 Subject: [PATCH 21/55] mypy --- pyproject.toml | 10 ++++++++++ src/app/api/api_v1/endpoints/alerts.py | 5 +++-- src/app/api/api_v1/endpoints/detections.py | 8 +++++--- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1fef5be0..f68bc2dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -160,6 +160,16 @@ check_untyped_defs = true implicit_reexport = false explicit_package_bases = true plugins = ["pydantic.mypy"] +[[tool.mypy.overrides]] +module = [ + "pandas", + "pandas.*", + "numpy", + "pyproj", + "shapely.*", + "geopy.*", +] +ignore_missing_imports = true [[tool.mypy.overrides]] module = [ diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py index dc3829df..9865a327 100644 --- a/src/app/api/api_v1/endpoints/alerts.py +++ b/src/app/api/api_v1/endpoints/alerts.py @@ -7,6 +7,7 @@ from typing import List, Union, cast from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status +from sqlalchemy import asc, desc from sqlmodel import delete, func, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -61,7 +62,7 @@ async def fetch_alert_sequences( select(Sequence) .join(AlertSequence, AlertSequence.sequence_id == Sequence.id) .where(AlertSequence.alert_id == alert_id) - .order_by(Sequence.last_seen_at.desc() if desc else Sequence.last_seen_at.asc()) # type: ignore[arg-type] + .order_by(desc(Sequence.last_seen_at) if desc else asc(Sequence.last_seen_at)) .limit(limit) ) res = await session.exec(stmt) @@ -129,7 +130,7 @@ async def delete_alert( verify_org_rights(token_payload.organization_id, alert) # Delete associations - await session.exec(delete(AlertSequence).where(AlertSequence.alert_id == alert_id)) + await session.exec(delete(AlertSequence).where(AlertSequence.alert_id == alert_id)) # type: ignore[arg-type] await session.commit() # Delete alert await alerts.delete(alert_id) diff --git a/src/app/api/api_v1/endpoints/detections.py 
b/src/app/api/api_v1/endpoints/detections.py index f635c4a8..6f97b4ce 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -119,8 +119,10 @@ async def _attach_sequence_to_alert( session = sequences.session mapping: dict[int, set[int]] = {} if seq_ids: - stmt = select(AlertSequence.alert_id, AlertSequence.sequence_id).where(AlertSequence.sequence_id.in_(seq_ids)) - res = await session.exec(stmt) # type: ignore[arg-type] + stmt = select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + AlertSequence.sequence_id.in_(seq_ids) # type: ignore[attr-defined] + ) + res = await session.exec(stmt) for aid, sid in res: mapping.setdefault(int(sid), set()).add(int(aid)) @@ -136,7 +138,7 @@ async def _attach_sequence_to_alert( target_alert_id = min(existing_alert_ids) # If we now have a location and the alert is missing it (or start_at can be improved), update it if isinstance(location, tuple): - current_alert = await alerts.get(target_alert_id, strict=True) + current_alert = cast(Alert, await alerts.get(target_alert_id, strict=True)) new_start_at = min(start_at, current_alert.started_at) if current_alert.started_at else start_at new_last_seen = ( max(last_seen_at, current_alert.last_seen_at) if current_alert.last_seen_at else last_seen_at From cf8a8ed6bebdfbd41ab1a26b506f65b1ccd787dd Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:33:23 +0100 Subject: [PATCH 22/55] ruff on test overlap --- src/tests/services/test_overlap.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tests/services/test_overlap.py b/src/tests/services/test_overlap.py index aa656053..1298ed10 100644 --- a/src/tests/services/test_overlap.py +++ b/src/tests/services/test_overlap.py @@ -45,9 +45,9 @@ def test_compute_overlap_groups_and_locations() -> None: row1 = df[df["id"] == 1].iloc[0] row4 = df[df["id"] == 4].iloc[0] - assert [(1, 2, 3)] == row1["event_groups"] + assert row1["event_groups"] == [(1, 2, 3)] assert row1["event_smoke_locations"][0] is not None # Non-overlapping singleton keeps its own group and no location - assert [(4,)] == row4["event_groups"] + assert row4["event_groups"] == [(4,)] assert row4["event_smoke_locations"] == [None] From 0bc7de160192d382bf222dc077ac166129642511 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 12:40:10 +0100 Subject: [PATCH 23/55] import issue --- src/app/api/api_v1/endpoints/alerts.py | 14 +++++++------- src/app/api/api_v1/endpoints/detections.py | 6 +++--- src/app/services/overlap.py | 9 ++++----- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py index 9865a327..3d7a608a 100644 --- a/src/app/api/api_v1/endpoints/alerts.py +++ b/src/app/api/api_v1/endpoints/alerts.py @@ -4,7 +4,7 @@ # See LICENSE or go to for full license details. 
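+# NB: the Any annotations and cast(...) wrappers introduced in this patch are a
+# typing workaround: mypy cannot resolve sqlmodel's select()/delete() overloads
+# for these expressions, and the casts have no effect at runtime.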
from datetime import date, datetime, timedelta -from typing import List, Union, cast +from typing import Any, List, Union, cast from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status from sqlalchemy import asc, desc @@ -48,7 +48,7 @@ async def get_alert( async def fetch_alert_sequences( alert_id: int = Path(..., gt=0), limit: int = Query(10, description="Maximum number of sequences to fetch", ge=1, le=100), - desc: bool = Query(True, description="Whether to order the sequences by last_seen_at in descending order"), + order_desc: bool = Query(True, description="Whether to order the sequences by last_seen_at in descending order"), alerts: AlertCRUD = Depends(get_alert_crud), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), @@ -58,11 +58,11 @@ async def fetch_alert_sequences( if UserRole.ADMIN not in token_payload.scopes: verify_org_rights(token_payload.organization_id, alert) - stmt = ( + stmt: Any = ( select(Sequence) .join(AlertSequence, AlertSequence.sequence_id == Sequence.id) .where(AlertSequence.alert_id == alert_id) - .order_by(desc(Sequence.last_seen_at) if desc else asc(Sequence.last_seen_at)) + .order_by(desc(Sequence.last_seen_at) if order_desc else asc(Sequence.last_seen_at)) .limit(limit) ) res = await session.exec(stmt) @@ -80,7 +80,7 @@ async def fetch_latest_unlabeled_alerts( ) -> List[AlertRead]: telemetry_client.capture(token_payload.sub, event="alerts-fetch-latest") - alerts_stmt = ( + alerts_stmt: Any = ( select(Alert) .join(AlertSequence, AlertSequence.alert_id == Alert.id) .join(Sequence, Sequence.id == AlertSequence.sequence_id) @@ -104,7 +104,7 @@ async def fetch_alerts_from_date( ) -> List[AlertRead]: telemetry_client.capture(token_payload.sub, event="alerts-fetch-from-date") - alerts_stmt = ( + alerts_stmt: Any = ( select(Alert) .where(Alert.organization_id == token_payload.organization_id) .where(func.date(Alert.started_at) == from_date) @@ -130,7 +130,7 @@ async def delete_alert( verify_org_rights(token_payload.organization_id, alert) # Delete associations - await session.exec(delete(AlertSequence).where(AlertSequence.alert_id == alert_id)) # type: ignore[arg-type] + await session.exec(cast(Any, delete(AlertSequence).where(AlertSequence.alert_id == alert_id))) await session.commit() # Delete alert await alerts.delete(alert_id) diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index 6f97b4ce..c8e304d6 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -5,7 +5,7 @@ from datetime import datetime, timedelta -from typing import List, Optional, cast +from typing import Any, List, Optional, cast import pandas as pd from fastapi import ( @@ -34,7 +34,7 @@ ) from app.core.config import settings from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD -from app.models import AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole +from app.models import Alert, AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole from app.schemas.alerts import AlertCreate, AlertUpdate from app.schemas.detections import ( BOXES_PATTERN, @@ -119,7 +119,7 @@ async def _attach_sequence_to_alert( session = sequences.session mapping: dict[int, set[int]] = {} if seq_ids: - stmt = select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + stmt: Any = 
select(AlertSequence.alert_id, AlertSequence.sequence_id).where( AlertSequence.sequence_id.in_(seq_ids) # type: ignore[attr-defined] ) res = await session.exec(stmt) diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py index 0e5e886e..d09137c0 100644 --- a/src/app/services/overlap.py +++ b/src/app/services/overlap.py @@ -18,9 +18,9 @@ import pyproj from geopy.distance import geodesic from pyproj import Transformer -from shapely.geometry import Polygon # type: ignore +from shapely.geometry import Polygon from shapely.geometry.base import BaseGeometry -from shapely.ops import transform as shapely_transform # type: ignore +from shapely.ops import transform as shapely_transform logger = logging.getLogger(__name__) @@ -354,12 +354,11 @@ def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, fl pts.append(get_centroid_latlon(inter)) if not pts: # No intersections: use centroid of available cones as best-effort location - polys = [projected_cones.get(sid) for sid in seq_tuple] - polys = [p for p in polys if p is not None] + polys: List[BaseGeometry] = [p for p in (projected_cones.get(sid) for sid in seq_tuple) if p is not None] if not polys: return None try: - merged = polys[0] + merged: BaseGeometry = polys[0] for p in polys[1:]: merged = merged.union(p) return get_centroid_latlon(merged) From 6ab2d8d1b453b212407ed0b116fbbcb40acfd113 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 13:15:30 +0100 Subject: [PATCH 24/55] fix style --- pyproject.toml | 4 ++++ src/app/api/api_v1/endpoints/alerts.py | 28 ++++++++++++-------------- src/app/crud/base.py | 8 ++++---- src/app/db.py | 17 ++++++++-------- src/app/main.py | 2 +- 5 files changed, 31 insertions(+), 28 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f68bc2dc..8dbc7e61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -168,6 +168,10 @@ module = [ "pyproj", "shapely.*", "geopy.*", + "passlib", + "passlib.*", + "requests", + "requests.*", ] ignore_missing_imports = true diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py index 3d7a608a..476cc7a6 100644 --- a/src/app/api/api_v1/endpoints/alerts.py +++ b/src/app/api/api_v1/endpoints/alerts.py @@ -58,15 +58,13 @@ async def fetch_alert_sequences( if UserRole.ADMIN not in token_payload.scopes: verify_org_rights(token_payload.organization_id, alert) - stmt: Any = ( - select(Sequence) - .join(AlertSequence, AlertSequence.sequence_id == Sequence.id) - .where(AlertSequence.alert_id == alert_id) - .order_by(desc(Sequence.last_seen_at) if order_desc else asc(Sequence.last_seen_at)) - .limit(limit) - ) - res = await session.exec(stmt) - return res.all() + order_clause: Any = desc(cast(Any, Sequence.last_seen_at)) if order_desc else asc(cast(Any, Sequence.last_seen_at)) + + seq_stmt: Any = select(Sequence).join(AlertSequence, cast(Any, AlertSequence.sequence_id == Sequence.id)) + seq_stmt = seq_stmt.where(AlertSequence.alert_id == alert_id).order_by(order_clause).limit(limit) + + res = await session.exec(seq_stmt) + return list(res.all()) @router.get( @@ -80,11 +78,10 @@ async def fetch_latest_unlabeled_alerts( ) -> List[AlertRead]: telemetry_client.capture(token_payload.sub, event="alerts-fetch-latest") - alerts_stmt: Any = ( - select(Alert) - .join(AlertSequence, AlertSequence.alert_id == Alert.id) - .join(Sequence, Sequence.id == AlertSequence.sequence_id) - .where(Alert.organization_id == token_payload.organization_id) + alerts_stmt: Any = select(Alert).join(AlertSequence, cast(Any, 
AlertSequence.alert_id == Alert.id)) + alerts_stmt = alerts_stmt.join(Sequence, cast(Any, Sequence.id == AlertSequence.sequence_id)) + alerts_stmt = ( + alerts_stmt.where(Alert.organization_id == token_payload.organization_id) .where(Sequence.last_seen_at > datetime.utcnow() - timedelta(hours=24)) .where(Sequence.is_wildfire.is_(None)) # type: ignore[union-attr] .order_by(Alert.started_at.desc()) # type: ignore[attr-defined] @@ -130,7 +127,8 @@ async def delete_alert( verify_org_rights(token_payload.organization_id, alert) # Delete associations - await session.exec(cast(Any, delete(AlertSequence).where(AlertSequence.alert_id == alert_id))) + delete_stmt: Any = delete(AlertSequence).where(AlertSequence.alert_id == cast(Any, alert_id)) + await session.exec(delete_stmt) await session.commit() # Delete alert await alerts.delete(alert_id) diff --git a/src/app/crud/base.py b/src/app/crud/base.py index be0b3dd1..d035b1a8 100644 --- a/src/app/crud/base.py +++ b/src/app/crud/base.py @@ -48,7 +48,7 @@ async def get(self, entry_id: int, strict: bool = False) -> Union[ModelType, Non return entry async def get_by(self, field_name: str, val: Union[str, int], strict: bool = False) -> Union[ModelType, None]: - statement = select(self.model).where(getattr(self.model, field_name) == val) # type: ignore[var-annotated] + statement: Any = select(self.model).where(getattr(self.model, field_name) == val) results = await self.session.exec(statement=statement) entry = results.one_or_none() if strict and entry is None: @@ -68,7 +68,7 @@ async def fetch_all( limit: Optional[int] = None, offset: Optional[int] = None, ) -> List[ModelType]: - statement = select(self.model) # type: ignore[var-annotated] + statement: Any = select(self.model) if isinstance(filters, tuple): statement = statement.where(getattr(self.model, filters[0]) == filters[1]) elif isinstance(filters, list): @@ -126,6 +126,6 @@ async def delete(self, entry_id: int) -> None: await self.session.commit() async def get_in(self, list_: List[Any], field_name: str) -> List[ModelType]: - statement = select(self.model).where(getattr(self.model, field_name).in_(list_)) # type: ignore[var-annotated] + statement: Any = select(self.model).where(getattr(self.model, field_name).in_(list_)) results = await self.session.exec(statement) - return results.all() + return list(results.all()) diff --git a/src/app/db.py b/src/app/db.py index 8716548d..6a8ab781 100644 --- a/src/app/db.py +++ b/src/app/db.py @@ -5,9 +5,10 @@ import asyncio import logging +from typing import Any +from sqlalchemy.ext.asyncio import async_sessionmaker from sqlalchemy.ext.asyncio.engine import AsyncEngine -from sqlalchemy.orm import sessionmaker from sqlmodel import SQLModel, create_engine, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -23,7 +24,7 @@ async def get_session() -> AsyncSession: # type: ignore[misc] - async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) + async_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) async with async_session() as session: yield session @@ -36,9 +37,9 @@ async def init_db() -> None: logger.info("Initializing PostgreSQL database...") # Create the superadmin organization - statement = select(Organization).where(Organization.name == settings.SUPERADMIN_ORG) # type: ignore[var-annotated] - results = await session.execute(statement=statement) - organization = results.scalar_one_or_none() + org_stmt: Any = select(Organization).where(Organization.name == settings.SUPERADMIN_ORG) + 
org_results = await session.exec(statement=org_stmt) + organization = org_results.one_or_none() if not organization: new_orga = Organization(name=settings.SUPERADMIN_ORG) session.add(new_orga) @@ -51,9 +52,9 @@ async def init_db() -> None: s3_service.create_bucket(s3_service.resolve_bucket_name(organization_id)) # Check if admin exists - statement = select(User).where(User.login == settings.SUPERADMIN_LOGIN) - results = await session.exec(statement=statement) - user = results.one_or_none() + user_stmt: Any = select(User).where(User.login == settings.SUPERADMIN_LOGIN) + user_results = await session.exec(statement=user_stmt) + user = user_results.one_or_none() if not user: pwd = hash_password(settings.SUPERADMIN_PWD) session.add( diff --git a/src/app/main.py b/src/app/main.py index 9762ec1e..df8513c8 100644 --- a/src/app/main.py +++ b/src/app/main.py @@ -80,7 +80,7 @@ async def add_process_time_header(request: Request, call_next): ) if isinstance(settings.SENTRY_DSN, str): - # Sentry middleware is compatible at runtime; ignore type mismatch from Starlette signature + # Sentry middleware is compatible at runtime app.add_middleware(SentryAsgiMiddleware) # type: ignore[arg-type] From 83626e05bb529240e984b509b99286ebde115ca6 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 15:48:11 +0100 Subject: [PATCH 25/55] fix deletions to respect fk --- src/app/api/api_v1/endpoints/organizations.py | 16 ++++++++++++++-- src/app/api/api_v1/endpoints/sequences.py | 7 +++++-- src/app/crud/base.py | 2 +- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/src/app/api/api_v1/endpoints/organizations.py b/src/app/api/api_v1/endpoints/organizations.py index 23596d72..5004ee6d 100644 --- a/src/app/api/api_v1/endpoints/organizations.py +++ b/src/app/api/api_v1/endpoints/organizations.py @@ -4,13 +4,14 @@ # See LICENSE or go to for full license details. 
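+# NB: Alert rows reference the organization, and AlertSequence rows reference
+# both alerts and sequences, so the deletion below clears the link rows first,
+# then the Alert rows, before the organization itself can be removed without
+# violating foreign key constraints.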
-from typing import List, cast +from typing import Any, List, cast from fastapi import APIRouter, Depends, HTTPException, Path, Security, status +from sqlmodel import delete, select from app.api.dependencies import get_jwt, get_organization_crud from app.crud import OrganizationCRUD -from app.models import Organization, UserRole +from app.models import Alert, AlertSequence, Organization, UserRole from app.schemas.login import TokenPayload from app.schemas.organizations import OrganizationCreate, SlackHook, TelegramChannelId from app.services.slack import slack_client @@ -71,6 +72,17 @@ async def delete_organization( telemetry_client.capture( token_payload.sub, event="organizations-deletion", properties={"organization_id": organization_id} ) + # Remove alerts and their associations for this organization to satisfy FK constraints + org_session = organizations.session + alert_ids_res = await org_session.exec(select(Alert.id).where(Alert.organization_id == organization_id)) + alert_ids = list(alert_ids_res.all()) + if alert_ids: + delete_links: Any = delete(AlertSequence).where(cast(Any, AlertSequence.alert_id).in_(alert_ids)) + delete_alerts: Any = delete(Alert).where(cast(Any, Alert.id).in_(alert_ids)) + await org_session.exec(delete_links) + await org_session.exec(delete_alerts) + await org_session.commit() + bucket_name = s3_service.resolve_bucket_name(organization_id) if not (await s3_service.delete_bucket(bucket_name)): raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to create bucket") diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 28084cf0..001223e8 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -7,13 +7,13 @@ from typing import List, Union, cast from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status -from sqlmodel import func, select +from sqlmodel import delete, func, select from sqlmodel.ext.asyncio.session import AsyncSession from app.api.dependencies import get_camera_crud, get_detection_crud, get_jwt, get_sequence_crud from app.crud import CameraCRUD, DetectionCRUD, SequenceCRUD from app.db import get_session -from app.models import Camera, Detection, Sequence, UserRole +from app.models import AlertSequence, Camera, Detection, Sequence, UserRole from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceLabel, SequenceRead @@ -144,6 +144,9 @@ async def delete_sequence( det_ids = await session.exec(select(Detection.id).where(Detection.sequence_id == sequence_id)) for det_id in det_ids.all(): await detections.update(det_id, DetectionSequence(sequence_id=None)) + # Drop alert links for this sequence to avoid FK issues + await session.exec(delete(AlertSequence).where(AlertSequence.sequence_id == sequence_id)) + await session.commit() # Delete the sequence await sequences.delete(sequence_id) diff --git a/src/app/crud/base.py b/src/app/crud/base.py index d035b1a8..08b89947 100644 --- a/src/app/crud/base.py +++ b/src/app/crud/base.py @@ -120,7 +120,7 @@ async def update(self, entry_id: int, payload: UpdateSchemaType) -> ModelType: async def delete(self, entry_id: int) -> None: await self.get(entry_id, strict=True) - statement = delete(self.model).where(self.model.id == entry_id) + statement = delete(self.model).where(cast(Any, self.model).id == entry_id) await 
self.session.exec(statement=statement)  # type: ignore[call-overload]
         await self.session.commit()

From 19e576231d2d6ca59d24f397513ae693f0b73fd9 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 15:50:43 +0100
Subject: [PATCH 26/55] cast

---
 src/app/api/api_v1/endpoints/sequences.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index 001223e8..84c299d5 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -4,7 +4,7 @@
 # See LICENSE or go to for full license details.

 from datetime import date, datetime, timedelta
-from typing import List, Union, cast
+from typing import Any, List, Union, cast

 from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status
 from sqlmodel import delete, func, select
@@ -145,7 +145,8 @@ async def delete_sequence(
     for det_id in det_ids.all():
         await detections.update(det_id, DetectionSequence(sequence_id=None))
     # Drop alert links for this sequence to avoid FK issues
-    await session.exec(delete(AlertSequence).where(AlertSequence.sequence_id == sequence_id))
+    delete_stmt: Any = delete(AlertSequence).where(cast(Any, AlertSequence.sequence_id) == sequence_id)
+    await session.exec(delete_stmt)
     await session.commit()
     # Delete the sequence
     await sequences.delete(sequence_id)

From 2bc5efb89db4d2de984ad3c901c401888e3e1b15 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 16:03:05 +0100
Subject: [PATCH 27/55] recompute alerts after seq annotation

---
 src/app/api/api_v1/endpoints/sequences.py | 48 +++++++++++++++++++++--
 1 file changed, 44 insertions(+), 4 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index 84c299d5..3edf2808 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -10,10 +10,11 @@
 from sqlmodel import delete, func, select
 from sqlmodel.ext.asyncio.session import AsyncSession

-from app.api.dependencies import get_camera_crud, get_detection_crud, get_jwt, get_sequence_crud
-from app.crud import CameraCRUD, DetectionCRUD, SequenceCRUD
+from app.api.dependencies import get_alert_crud, get_camera_crud, get_detection_crud, get_jwt, get_sequence_crud
+from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, SequenceCRUD
 from app.db import get_session
-from app.models import AlertSequence, Camera, Detection, Sequence, UserRole
+from app.models import AlertSequence, AnnotationType, Camera, Detection, Sequence, UserRole
+from app.schemas.alerts import AlertCreate, AlertUpdate
 from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl
 from app.schemas.login import TokenPayload
 from app.schemas.sequences import SequenceLabel, SequenceRead
@@ -158,6 +159,8 @@ async def label_sequence(
     sequence_id: int = Path(..., gt=0),
     cameras: CameraCRUD = Depends(get_camera_crud),
     sequences: SequenceCRUD = Depends(get_sequence_crud),
+    alerts: AlertCRUD = Depends(get_alert_crud),
+    session: AsyncSession = Depends(get_session),
     token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT]),
 ) -> Sequence:
     telemetry_client.capture(token_payload.sub, event="sequence-label", properties={"sequence_id": sequence_id})
@@ -166,4 +169,41 @@ async def label_sequence(
     if UserRole.ADMIN not in token_payload.scopes:
         await verify_org_rights(token_payload.organization_id, sequence.camera_id, cameras)

-    return await 
sequences.update(sequence_id, payload) + updated = await sequences.update(sequence_id, payload) + + # If sequence is labeled as non-wildfire, remove it from alerts and refresh those alerts + if payload.is_wildfire is not None and payload.is_wildfire != AnnotationType.WILDFIRE_SMOKE: + alert_ids_res = await session.exec(select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id)) + alert_ids = list(alert_ids_res.all()) + if alert_ids: + delete_links: Any = delete(AlertSequence).where(cast(Any, AlertSequence.sequence_id) == sequence_id) + await session.exec(delete_links) + await session.commit() + for aid in alert_ids: + remaining_res = await session.exec( + select(Sequence) + .join(AlertSequence, cast(Any, AlertSequence.sequence_id) == Sequence.id) + .where(AlertSequence.alert_id == aid) + ) + remaining = remaining_res.all() + if not remaining: + await alerts.delete(aid) + continue + new_start = min(seq.started_at for seq in remaining) + new_last = max(seq.last_seen_at for seq in remaining) + await alerts.update(aid, AlertUpdate(started_at=new_start, last_seen_at=new_last, lat=None, lon=None)) + # Create a fresh alert for this sequence alone + camera = cast(Camera, await cameras.get(sequence.camera_id, strict=True)) + new_alert = await alerts.create( + AlertCreate( + organization_id=camera.organization_id, + started_at=sequence.started_at, + last_seen_at=sequence.last_seen_at, + lat=None, + lon=None, + ) + ) + session.add(AlertSequence(alert_id=new_alert.id, sequence_id=sequence_id)) + await session.commit() + + return updated From 56ad18bd81d633d228456c6e691aacb82a96be68 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 17:13:21 +0100 Subject: [PATCH 28/55] fix alert update --- src/app/api/api_v1/endpoints/sequences.py | 68 +++++++++++++++++++---- 1 file changed, 56 insertions(+), 12 deletions(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 3edf2808..46407b1b 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -6,6 +6,7 @@ from datetime import date, datetime, timedelta from typing import Any, List, Union, cast +import pandas as pd from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status from sqlmodel import delete, func, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -19,6 +20,7 @@ from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceLabel, SequenceRead from app.services.storage import s3_service +from app.services.overlap import compute_overlap from app.services.telemetry import telemetry_client router = APIRouter() @@ -32,6 +34,53 @@ async def verify_org_rights( raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") +async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: AlertCRUD) -> None: + remaining_res = await session.exec( + select(Sequence, Camera) + .join(AlertSequence, cast(Any, AlertSequence.sequence_id) == Sequence.id) + .join(Camera, cast(Any, Camera.id) == Sequence.camera_id) + .where(AlertSequence.alert_id == alert_id) + ) + rows = remaining_res.all() + if not rows: + await alerts.delete(alert_id) + return + + seqs = [row[0] for row in rows] + cams = [row[1] for row in rows] + new_start = min(seq.started_at for seq in seqs) + new_last = max(seq.last_seen_at for seq in seqs) + + loc: Union[None, tuple[float, float]] = None + if len(rows) >= 2: + records = [] + for seq, cam in zip(seqs, cams, strict=False): 
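+            # Assemble the per-sequence frame that compute_overlap expects:
+            # camera position plus the sequence's viewing cone and annotation
+            # state (see the overlap service for the grouping logic).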
+ records.append( + { + "id": seq.id, + "lat": cam.lat, + "lon": cam.lon, + "cone_azimuth": seq.cone_azimuth, + "cone_angle": seq.cone_angle, + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + } + ) + df = compute_overlap(pd.DataFrame.from_records(records)) + loc = next((loc for locs in df["event_smoke_locations"].tolist() for loc in locs if loc is not None), None) + + await alerts.update( + alert_id, + AlertUpdate( + started_at=new_start, + last_seen_at=new_last, + lat=loc[0] if loc else None, + lon=loc[1] if loc else None, + ), + ) + + @router.get("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific sequence") async def get_sequence( sequence_id: int = Path(..., gt=0), @@ -137,10 +186,13 @@ async def delete_sequence( sequence_id: int = Path(..., gt=0), sequences: SequenceCRUD = Depends(get_sequence_crud), detections: DetectionCRUD = Depends(get_detection_crud), + alerts: AlertCRUD = Depends(get_alert_crud), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]), ) -> None: telemetry_client.capture(token_payload.sub, event="sequence-deletion", properties={"sequence_id": sequence_id}) + alert_ids_res = await session.exec(select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id)) + alert_ids = list(alert_ids_res.all()) # Unset the sequence_id in the detections det_ids = await session.exec(select(Detection.id).where(Detection.sequence_id == sequence_id)) for det_id in det_ids.all(): @@ -151,6 +203,9 @@ async def delete_sequence( await session.commit() # Delete the sequence await sequences.delete(sequence_id) + # Refresh affected alerts + for aid in alert_ids: + await _refresh_alert_state(aid, session, alerts) @router.patch("/{sequence_id}/label", status_code=status.HTTP_200_OK, summary="Label the nature of the sequence") @@ -180,18 +235,7 @@ async def label_sequence( await session.exec(delete_links) await session.commit() for aid in alert_ids: - remaining_res = await session.exec( - select(Sequence) - .join(AlertSequence, cast(Any, AlertSequence.sequence_id) == Sequence.id) - .where(AlertSequence.alert_id == aid) - ) - remaining = remaining_res.all() - if not remaining: - await alerts.delete(aid) - continue - new_start = min(seq.started_at for seq in remaining) - new_last = max(seq.last_seen_at for seq in remaining) - await alerts.update(aid, AlertUpdate(started_at=new_start, last_seen_at=new_last, lat=None, lon=None)) + await _refresh_alert_state(aid, session, alerts) # Create a fresh alert for this sequence alone camera = cast(Camera, await cameras.get(sequence.camera_id, strict=True)) new_alert = await alerts.create( From 761fefdda17e474f2aa86b02002a511c3a6095f5 Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 17:21:15 +0100 Subject: [PATCH 29/55] style --- src/app/api/api_v1/endpoints/sequences.py | 33 ++++++++++++----------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 46407b1b..68910ed0 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -19,8 +19,8 @@ from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceLabel, SequenceRead -from app.services.storage import s3_service from app.services.overlap import compute_overlap +from 
app.services.storage import s3_service from app.services.telemetry import telemetry_client router = APIRouter() @@ -35,12 +35,13 @@ async def verify_org_rights( async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: AlertCRUD) -> None: - remaining_res = await session.exec( + remaining_stmt: Any = ( select(Sequence, Camera) .join(AlertSequence, cast(Any, AlertSequence.sequence_id) == Sequence.id) .join(Camera, cast(Any, Camera.id) == Sequence.camera_id) - .where(AlertSequence.alert_id == alert_id) ) + remaining_stmt = remaining_stmt.where(AlertSequence.alert_id == alert_id) + remaining_res = await session.exec(remaining_stmt) rows = remaining_res.all() if not rows: await alerts.delete(alert_id) @@ -55,18 +56,16 @@ async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: Ale if len(rows) >= 2: records = [] for seq, cam in zip(seqs, cams, strict=False): - records.append( - { - "id": seq.id, - "lat": cam.lat, - "lon": cam.lon, - "cone_azimuth": seq.cone_azimuth, - "cone_angle": seq.cone_angle, - "is_wildfire": seq.is_wildfire, - "started_at": seq.started_at, - "last_seen_at": seq.last_seen_at, - } - ) + records.append({ + "id": seq.id, + "lat": cam.lat, + "lon": cam.lon, + "cone_azimuth": seq.cone_azimuth, + "cone_angle": seq.cone_angle, + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + }) df = compute_overlap(pd.DataFrame.from_records(records)) loc = next((loc for locs in df["event_smoke_locations"].tolist() for loc in locs if loc is not None), None) @@ -228,7 +227,9 @@ async def label_sequence( # If sequence is labeled as non-wildfire, remove it from alerts and refresh those alerts if payload.is_wildfire is not None and payload.is_wildfire != AnnotationType.WILDFIRE_SMOKE: - alert_ids_res = await session.exec(select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id)) + alert_ids_res = await session.exec( + select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id) + ) alert_ids = list(alert_ids_res.all()) if alert_ids: delete_links: Any = delete(AlertSequence).where(cast(Any, AlertSequence.sequence_id) == sequence_id) From a306c0bf1641e3d98432099cf72b1d010bda779b Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 17:22:49 +0100 Subject: [PATCH 30/55] style --- src/app/api/api_v1/endpoints/sequences.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index 68910ed0..076d4eed 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -52,7 +52,7 @@ async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: Ale new_start = min(seq.started_at for seq in seqs) new_last = max(seq.last_seen_at for seq in seqs) - loc: Union[None, tuple[float, float]] = None + loc: Union[tuple[float, float], None] = None if len(rows) >= 2: records = [] for seq, cam in zip(seqs, cams, strict=False): From 2ab57fdf44d7310376a83156a498a542521fca4b Mon Sep 17 00:00:00 2001 From: Mateo Date: Tue, 23 Dec 2025 17:38:57 +0100 Subject: [PATCH 31/55] adapt test --- src/tests/endpoints/test_alerts.py | 142 ++++++++++++++++++++++++++++- 1 file changed, 139 insertions(+), 3 deletions(-) diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py index fb83f706..fabb44f4 100644 --- a/src/tests/endpoints/test_alerts.py +++ b/src/tests/endpoints/test_alerts.py @@ -4,13 +4,16 @@ # See LICENSE or go to 
for full license details.

 from datetime import datetime, timedelta
-from typing import List, Tuple
+from typing import Any, List, Tuple, cast

-import pytest
+import pandas as pd
+import pytest  # type: ignore
 from httpx import AsyncClient
+from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession

-from app.models import Alert, AlertSequence, Sequence
+from app.models import Alert, AlertSequence, AnnotationType, Camera, Sequence
+from app.services.overlap import compute_overlap


 async def _create_alert_with_sequences(
@@ -136,3 +139,136 @@ async def test_alerts_from_date(async_client: AsyncClient, detection_session: As
     returned = resp.json()
     started_times = [item["started_at"] for item in returned]
     assert started_times == sorted(started_times, reverse=True)
+
+
+@pytest.mark.asyncio
+async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient, detection_session: AsyncSession):
+    # Build three overlapping sequences on the same camera
+    now = datetime.utcnow()
+    camera_id = 1
+    seq_specs = [
+        {"azimuth": 0.0, "cone_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
+        {"azimuth": 5.0, "cone_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
+        {"azimuth": 350.0, "cone_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
+    ]
+    seqs: List[Sequence] = []
+    for spec in seq_specs:
+        seq = Sequence(
+            camera_id=camera_id,
+            pose_id=None,
+            azimuth=spec["azimuth"],
+            is_wildfire=None,
+            cone_azimuth=spec["cone_azimuth"],
+            cone_angle=spec["cone_angle"],
+            started_at=now - timedelta(seconds=10 + spec["offset"]),
+            last_seen_at=now - timedelta(seconds=spec["offset"]),
+        )
+        detection_session.add(seq)
+        seqs.append(seq)
+    await detection_session.commit()
+    for seq in seqs:
+        await detection_session.refresh(seq)
+
+    # Compute initial alert location from all three
+    camera = await detection_session.get(Camera, camera_id)
+    assert camera is not None
+    records = [
+        {
+            "id": seq.id,
+            "lat": camera.lat,
+            "lon": camera.lon,
+            "cone_azimuth": seq.cone_azimuth,
+            "cone_angle": seq.cone_angle,
+            "is_wildfire": seq.is_wildfire,
+            "started_at": seq.started_at,
+            "last_seen_at": seq.last_seen_at,
+        }
+        for seq in seqs
+    ]
+    df_all = compute_overlap(pd.DataFrame.from_records(records))
+    initial_loc = next((loc for locs in df_all["event_smoke_locations"].tolist() for loc in locs if loc is not None), None)
+
+    alert = Alert(
+        organization_id=1,
+        lat=initial_loc[0] if initial_loc else None,
+        lon=initial_loc[1] if initial_loc else None,
+        started_at=min(seq.started_at for seq in seqs),
+        last_seen_at=max(seq.last_seen_at for seq in seqs),
+    )
+    detection_session.add(alert)
+    await detection_session.commit()
+    await detection_session.refresh(alert)
+
+    for seq in seqs:
+        detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id))
+    await detection_session.commit()
+
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
+
+    # Relabel one sequence as non-wildfire -> should be detached, new alert created, old alert recomputed
+    target_seq = seqs[-1]
+    resp = await async_client.patch(
+        f"/sequences/{target_seq.id}/label",
+        json={"is_wildfire": AnnotationType.OTHER_SMOKE.value},
+        headers=auth,
+    )
+    assert resp.status_code == 200, resp.text
+
+    # Fetch alerts and mappings with fresh values
+    alerts_res = await detection_session.exec(select(Alert).execution_options(populate_existing=True))
+    alerts_in_db = alerts_res.all()
+    assert len(alerts_in_db) == 2
+
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).execution_options(populate_existing=True)
+    )
+    mappings = mappings_res.all()
+
+    # Identify alerts
+    new_alert = next(a for a in alerts_in_db if a.id != alert.id)
+    updated_alert = next(a for a in alerts_in_db if a.id == alert.id)
+
+    # New alert should only reference relabeled sequence
+    assert (new_alert.id, target_seq.id) in mappings
+    assert updated_alert.id != new_alert.id
+
+    # Updated alert should only reference remaining sequences
+    remaining_ids = {seqs[0].id, seqs[1].id}
+    remaining_mappings = {(aid, sid) for aid, sid in mappings if aid == updated_alert.id}
+    assert remaining_mappings == {(updated_alert.id, sid) for sid in remaining_ids}
+
+    # Reload remaining sequences from DB to compare times
+    remaining_seqs_res = await detection_session.exec(
+        select(Sequence)
+        .where(cast(Any, Sequence.id).in_(list(remaining_ids)))
+        .execution_options(populate_existing=True)
+    )
+    remaining_seqs = remaining_seqs_res.all()
+
+    # Updated alert times recomputed
+    assert updated_alert.started_at == min(seq.started_at for seq in remaining_seqs)
+    assert updated_alert.last_seen_at == max(seq.last_seen_at for seq in remaining_seqs)
+
+    # Updated alert location recomputed from remaining sequences
+    remaining_records = [
+        {
+            "id": seq.id,
+            "lat": camera.lat,
+            "lon": camera.lon,
+            "cone_azimuth": seq.cone_azimuth,
+            "cone_angle": seq.cone_angle,
+            "is_wildfire": seq.is_wildfire,
+            "started_at": seq.started_at,
+            "last_seen_at": seq.last_seen_at,
+        }
+        for seq in remaining_seqs
+    ]
+    df_remaining = compute_overlap(pd.DataFrame.from_records(remaining_records))
+    remaining_loc = next(
+        (loc for locs in df_remaining["event_smoke_locations"].tolist() for loc in locs if loc is not None), None
+    )
+    if remaining_loc:
+        assert updated_alert.lat == pytest.approx(remaining_loc[0])
+        assert updated_alert.lon == pytest.approx(remaining_loc[1])

From afda94e47d64177089305953b7b11f907c31652b Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 23 Dec 2025 17:40:52 +0100
Subject: [PATCH 32/55] ruff on test

---
 src/tests/endpoints/test_alerts.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index fabb44f4..b6b79304 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -186,7 +186,9 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
         for seq in seqs
     ]
     df_all = compute_overlap(pd.DataFrame.from_records(records))
-    initial_loc = next((loc for locs in df_all["event_smoke_locations"].tolist() for loc in locs if loc is not None), None)
+    initial_loc = next(
+        (loc for locs in df_all["event_smoke_locations"].tolist() for loc in locs if loc is not None), None
+    )

     alert = Alert(
         organization_id=1,

From 1d9732fc5104f80baa3a303a4787862ed70c8be5 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Wed, 24 Dec 2025 07:50:39 +0100
Subject: [PATCH 33/55] add tests on detections

---
 src/tests/endpoints/test_detections.py | 114 ++++++++++++++++++++++++-
 1 file changed, 113 insertions(+), 1 deletion(-)

diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py
index dd4864b1..8ac84bd0 100644
--- a/src/tests/endpoints/test_detections.py
+++ b/src/tests/endpoints/test_detections.py
@@ -1,9 +1,17 @@
+from datetime import datetime, timedelta
 from typing import Any, Dict, List, Union

-import pytest
+import pytest  # type: ignore
 from httpx import AsyncClient
+from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession

+from app.api.api_v1.endpoints.detections import _attach_sequence_to_alert
+from app.core.config import settings
+from app.crud import AlertCRUD, CameraCRUD, SequenceCRUD
+from app.models import AlertSequence, Camera, Detection, Sequence
+from app.services.cones import resolve_cone
+

 @pytest.mark.parametrize(
     ("user_idx", "cam_idx", "payload", "status_code", "status_detail", "repeat"),
@@ -253,3 +261,107 @@ async def test_delete_detection(
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
         assert response.json() is None
+
+
+@pytest.mark.asyncio
+async def test_create_detection_creates_sequence(
+    async_client: AsyncClient, detection_session: AsyncSession, monkeypatch
+):
+    # Force sequence creation on first detection
+    monkeypatch.setattr(settings, "SEQUENCE_MIN_INTERVAL_DETS", 1)
+    mock_img = b"img"
+    auth = pytest.get_token(pytest.camera_table[0]["id"], ["camera"], pytest.camera_table[0]["organization_id"])
+    payload = {
+        "azimuth": 120.0,
+        "pose_id": None,
+        "bboxes": "[(0.1,0.1,0.2,0.2,0.9)]",
+    }
+    resp = await async_client.post(
+        "/detections", data=payload, files={"file": ("img.png", mock_img, "image/png")}, headers=auth
+    )
+    assert resp.status_code == 201, resp.text
+    data = resp.json()
+    assert data["sequence_id"] is not None
+
+    seq_res = await detection_session.get(Sequence, data["sequence_id"])
+    assert seq_res is not None
+    assert seq_res.cone_azimuth is not None
+    assert seq_res.cone_angle is not None
+    camera = await detection_session.get(Camera, pytest.camera_table[0]["id"])
+    assert camera is not None
+    expected_cone_azimuth, expected_cone_angle = resolve_cone(
+        float(payload["azimuth"] if payload["azimuth"] is not None else 0.0),
+        str(payload["bboxes"]),
+        camera.angle_of_view,
+    )
+    assert seq_res.cone_azimuth == pytest.approx(expected_cone_azimuth)
+    assert seq_res.cone_angle == pytest.approx(expected_cone_angle)
+    # Detection references the sequence
+    det_res = await detection_session.get(Detection, data["id"])
+    assert det_res is not None
+    assert det_res.sequence_id == seq_res.id
+
+
+@pytest.mark.asyncio
+async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSession):
+    seq_crud = SequenceCRUD(detection_session)
+    alert_crud = AlertCRUD(detection_session)
+    cam_crud = CameraCRUD(detection_session)
+    now = datetime.utcnow()
+    cam1 = await detection_session.get(Camera, 1)
+    assert cam1 is not None
+    cam2 = Camera(
+        organization_id=1,
+        name="cam-3",
+        angle_of_view=90.0,
+        elevation=100.0,
+        lat=3.7,
+        lon=-45.0,
+        is_trustable=True,
+        last_active_at=now,
+        last_image=None,
+        created_at=now,
+    )
+    detection_session.add(cam2)
+    await detection_session.commit()
+    await detection_session.refresh(cam2)
+
+    seq1 = Sequence(
+        camera_id=cam1.id,
+        pose_id=None,
+        azimuth=0.0,
+        cone_azimuth=0.0,
+        cone_angle=90.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=30),
+        last_seen_at=now - timedelta(seconds=20),
+    )
+    seq2 = Sequence(
+        camera_id=cam2.id,
+        pose_id=None,
+        azimuth=5.0,
+        cone_azimuth=5.0,
+        cone_angle=90.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=25),
+        last_seen_at=now - timedelta(seconds=10),
+    )
+    detection_session.add(seq1)
+    detection_session.add(seq2)
+    await detection_session.commit()
+    await detection_session.refresh(seq1)
+    await detection_session.refresh(seq2)
+
+    await _attach_sequence_to_alert(seq2, cam2, cam_crud, seq_crud, alert_crud)
+
+    alerts = await alert_crud.fetch_all()
+    assert len(alerts) == 1
+    alert = alerts[0]
+    assert alert.started_at == min(seq1.started_at, seq2.started_at)
+    assert alert.last_seen_at == max(seq1.last_seen_at, seq2.last_seen_at)
+    assert alert.lat is not None
+    assert alert.lon is not None
+
+    mappings_res = await detection_session.exec(select(AlertSequence))
+    mappings = mappings_res.all()
+    assert {(m.alert_id, m.sequence_id) for m in mappings} == {(alert.id, seq1.id), (alert.id, seq2.id)}

From 204bbb62e934baa3a4d824b9781c6f3e1353d010 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Wed, 24 Dec 2025 08:22:37 +0100
Subject: [PATCH 34/55] increase test coverage on sequences

---
 src/tests/endpoints/test_sequences.py | 161 +++++++++++++++++++++++++-
 1 file changed, 160 insertions(+), 1 deletion(-)

diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py
index dd8aaf57..d08659e6 100644
--- a/src/tests/endpoints/test_sequences.py
+++ b/src/tests/endpoints/test_sequences.py
@@ -1,9 +1,14 @@
+from datetime import datetime, timedelta
 from typing import Any, Dict, List, Union

-import pytest
+import pytest  # type: ignore
 from httpx import AsyncClient
+from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession

+from app.models import Alert, AlertSequence, Camera, Detection, Sequence
+from app.schemas.sequences import SequenceLabel
+

 @pytest.mark.parametrize(
     ("user_idx", "sequence_id", "status_code", "status_detail", "expected_result"),
@@ -216,3 +221,157 @@ async def test_latest_sequences(
         assert response.json() == expected_result
         assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json())
         assert all(isinstance(elt["cone_angle"], float) for elt in response.json())
+
+
+@pytest.mark.asyncio
+async def test_sequence_label_updates_alerts(async_client: AsyncClient, detection_session: AsyncSession):
+    # Create a sequence linked to a camera and an alert
+    camera = await detection_session.get(Camera, 1)
+    assert camera is not None
+    now = datetime.utcnow()
+    seq1 = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        azimuth=180.0,
+        cone_azimuth=170.0,
+        cone_angle=5.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=30),
+        last_seen_at=now - timedelta(seconds=20),
+    )
+    seq2 = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        azimuth=182.0,
+        cone_azimuth=172.0,
+        cone_angle=5.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=25),
+        last_seen_at=now - timedelta(seconds=10),
+    )
+    detection_session.add(seq1)
+    detection_session.add(seq2)
+    await detection_session.commit()
+    await detection_session.refresh(seq1)
+    await detection_session.refresh(seq2)
+
+    alert = Alert(
+        organization_id=camera.organization_id,
+        lat=1.0,
+        lon=2.0,
+        started_at=min(seq1.started_at, seq2.started_at),
+        last_seen_at=max(seq1.last_seen_at, seq2.last_seen_at),
+    )
+    detection_session.add(alert)
+    await detection_session.commit()
+    await detection_session.refresh(alert)
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq1.id))
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq2.id))
+    await detection_session.commit()
+
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
+
+    # Keep original timings to avoid accessing expired ORM objects
+    seq1_start, seq1_last = seq1.started_at, seq1.last_seen_at
+    seq2_start, seq2_last = seq2.started_at, seq2.last_seen_at
+
+    resp = await async_client.patch(
+        f"/sequences/{seq1.id}/label",
+        json={"is_wildfire": SequenceLabel(is_wildfire="other_smoke").is_wildfire},
+        headers=auth,
+    )
+    assert resp.status_code == 200, resp.text
+
+    alerts_res = await detection_session.exec(select(Alert).execution_options(populate_existing=True))
+    alerts_rows = alerts_res.all()
+    assert len(alerts_rows) == 2
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).execution_options(populate_existing=True)
+    )
+    mappings = {(aid, sid) for aid, sid in mappings_res.all()}
+
+    row_by_id = {row.id: row for row in alerts_rows}
+    new_alert_row = next(row for aid, row in row_by_id.items() if aid != alert.id)
+    updated_alert_row = row_by_id[alert.id]
+
+    assert (new_alert_row.id, seq1.id) in mappings
+    assert (updated_alert_row.id, seq1.id) not in mappings
+    assert (updated_alert_row.id, seq2.id) in mappings
+
+    assert updated_alert_row.started_at == seq2_start
+    assert updated_alert_row.last_seen_at == seq2_last
+    assert updated_alert_row.lat is None
+    assert updated_alert_row.lon is None
+
+    assert new_alert_row.started_at == seq1_start
+    assert new_alert_row.last_seen_at == seq1_last
+    assert new_alert_row.lat is None
+    assert new_alert_row.lon is None
+
+
+@pytest.mark.asyncio
+async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncClient, detection_session: AsyncSession):
+    camera = await detection_session.get(Camera, 1)
+    assert camera is not None
+    now = datetime.utcnow()
+    seq = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        azimuth=45.0,
+        cone_azimuth=40.0,
+        cone_angle=10.0,
+        is_wildfire=None,
+        started_at=now,
+        last_seen_at=now,
+    )
+    detection = Detection(
+        camera_id=camera.id,
+        pose_id=None,
+        sequence_id=None,
+        azimuth=45.0,
+        bucket_key="tmp",
+        bboxes="[(0.1,0.1,0.2,0.2,0.9)]",
+        created_at=now,
+    )
+    detection_session.add(seq)
+    detection_session.add(detection)
+    await detection_session.commit()
+    await detection_session.refresh(seq)
+    await detection_session.refresh(detection)
+
+    alert = Alert(
+        organization_id=camera.organization_id,
+        lat=None,
+        lon=None,
+        started_at=now,
+        last_seen_at=now,
+    )
+    detection_session.add(alert)
+    await detection_session.commit()
+    await detection_session.refresh(alert)
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id))
+    await detection_session.commit()
+
+    # Link detection to sequence
+    detection.sequence_id = seq.id
+    detection_session.add(detection)
+    await detection_session.commit()
+
+    auth = pytest.get_token(pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"])
+    resp = await async_client.delete(f"/sequences/{seq.id}", headers=auth)
+    assert resp.status_code == 200, resp.text
+
+    # Alert and mapping should be gone
+    mappings_res = await detection_session.exec(select(AlertSequence))
+    assert mappings_res.all() == []
+    alerts_res = await detection_session.exec(select(Alert))
+    assert alerts_res.all() == []
+
+    # Detection should have sequence_id cleared
+    det_res = await detection_session.exec(
+        select(Detection).where(Detection.id == detection.id).execution_options(populate_existing=True)
+    )
+    det = det_res.one()
+    assert det.sequence_id is None

From 662300e8aea5d68548815687642011293c55960b Mon Sep 17 00:00:00 2001
From: Mateo
Date: Wed, 24 Dec 2025 09:28:33 +0100
Subject: [PATCH 35/55] ruff

---
 src/tests/endpoints/test_sequences.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py
index d08659e6..220727e8 100644
--- a/src/tests/endpoints/test_sequences.py
+++ b/src/tests/endpoints/test_sequences.py
@@ -359,7 +359,9 @@ async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncC
     detection_session.add(detection)
     await detection_session.commit()

-    auth = pytest.get_token(pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"])
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
     resp = await async_client.delete(f"/sequences/{seq.id}", headers=auth)
     assert resp.status_code == 200, resp.text

From 3d3fa3537a88f402fef6d977848c0aa7aa09dabd Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 30 Dec 2025 19:34:28 +0100
Subject: [PATCH 36/55] limit lat and lon

---
 src/app/models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/app/models.py b/src/app/models.py
index a6be132c..3e8c9daa 100644
--- a/src/app/models.py
+++ b/src/app/models.py
@@ -96,8 +96,8 @@ class Alert(SQLModel, table=True):
     __tablename__ = "alerts"
     id: int = Field(None, primary_key=True)
     organization_id: int = Field(..., foreign_key="organizations.id", nullable=False)
-    lat: Union[float, None] = Field(default=None)
-    lon: Union[float, None] = Field(default=None)
+    lat: Union[float, None] = Field(default=None, gt=-90, lt=90, nullable=True)
+    lon: Union[float, None] = Field(default=None, gt=-180, lt=180, nullable=True)
     started_at: datetime = Field(..., nullable=False)
     last_seen_at: datetime = Field(..., nullable=False)

From ea1994a0185f5ac2bb290447cfa9781caf5c0bfb Mon Sep 17 00:00:00 2001
From: Mateo
Date: Tue, 30 Dec 2025 19:35:44 +0100
Subject: [PATCH 37/55] rename function

---
 src/app/services/overlap.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py
index d09137c0..5ff1bacb 100644
--- a/src/app/services/overlap.py
+++ b/src/app/services/overlap.py
@@ -124,7 +124,7 @@ def _build_cone_polygon(
     return Polygon([(lon, lat), *outer_points]).buffer(0)

-def _project_polygon_to_3857(polygon: Polygon) -> Polygon:
+def _project_polygon_from_4326_to_3857(polygon: Polygon) -> Polygon:
     """
     Project a polygon from EPSG:4326 to EPSG:3857.
@@ -168,7 +168,7 @@ def get_projected_cone(row: pd.Series, r_km: float, r_min_km: float) -> Polygon:
         float(r_km),
         float(r_min_km),
     )
-    return _project_polygon_to_3857(poly)
+    return _project_polygon_from_4326_to_3857(poly)

 def _compute_localized_groups_from_cliques(

From 7c78792fff3a8fdad4ad3b57be7b6a88f8272a84 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 09:33:01 +0100
Subject: [PATCH 38/55] rename to sequence_azimuth

---
 src/app/api/api_v1/endpoints/detections.py | 6 +++---
 src/app/api/api_v1/endpoints/sequences.py | 2 +-
 src/app/models.py | 2 +-
 src/app/services/overlap.py | 6 +++---
 src/tests/conftest.py | 4 ++--
 src/tests/endpoints/test_alerts.py | 18 +++++++++---------
 src/tests/endpoints/test_detections.py | 10 +++++-----
 src/tests/endpoints/test_sequences.py | 10 +++++-----
 src/tests/services/test_overlap.py | 4 ++--
 9 files changed, 31 insertions(+), 31 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index c8e304d6..321f3358 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -88,13 +88,13 @@ async def _attach_sequence_to_alert(
     records = []
     for seq in recent_sequences:
         cam = camera_by_id.get(seq.camera_id)
-        if cam is None or seq.cone_azimuth is None or seq.cone_angle is None:
+        if cam is None or seq.sequence_azimuth is None or seq.cone_angle is None:
             continue
         records.append({
             "id": int(seq.id),
             "lat": float(cam.lat),
             "lon": float(cam.lon),
-            "cone_azimuth": float(seq.cone_azimuth),
+            "sequence_azimuth": float(seq.sequence_azimuth),
             "cone_angle": float(seq.cone_angle),
             "is_wildfire": seq.is_wildfire,
             "started_at": seq.started_at,
@@ -256,7 +256,7 @@ async def create_detection(
                     camera_id=token_payload.sub,
                     pose_id=pose_id,
                     azimuth=det.azimuth,
-                    cone_azimuth=cone_azimuth,
+                    sequence_azimuth=cone_azimuth,
                     cone_angle=cone_angle,
                     started_at=dets_[0].created_at,
                     last_seen_at=det.created_at,
diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index 076d4eed..25e6358d 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -60,7 +60,7 @@ async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: Ale
             "id": seq.id,
             "lat": cam.lat,
             "lon": cam.lon,
-            "cone_azimuth": seq.cone_azimuth,
+            "sequence_azimuth": seq.sequence_azimuth,
             "cone_angle": seq.cone_angle,
             "is_wildfire": seq.is_wildfire,
             "started_at": seq.started_at,
diff --git a/src/app/models.py b/src/app/models.py
index 3e8c9daa..b36aa16c 100644
--- a/src/app/models.py
+++ b/src/app/models.py
@@ -86,7 +86,7 @@ class Sequence(SQLModel, table=True):
     pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True)
     azimuth: float = Field(..., ge=0, lt=360)
     is_wildfire: Union[AnnotationType, None] = None
-    cone_azimuth: Union[float, None] = Field(None, nullable=True)
+    sequence_azimuth: Union[float, None] = Field(None, nullable=True)
     cone_angle: Union[float, None] = Field(None, nullable=True)
     started_at: datetime = Field(..., nullable=False)
     last_seen_at: datetime = Field(..., nullable=False)
diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py
index 5ff1bacb..0d99af4b 100644
--- a/src/app/services/overlap.py
+++ b/src/app/services/overlap.py
@@ -149,7 +149,7 @@ def get_projected_cone(row: pd.Series, r_km: float, r_min_km: float) -> Polygon:
     Parameters
     ----------
     row : pd.Series
-        Row with fields: lat, lon, cone_azimuth, cone_angle.
+        Row with fields: lat, lon, sequence_azimuth, cone_angle.
     r_km : float
         Outer radius of the camera detection cone in kilometers.
     r_min_km : float
@@ -163,7 +163,7 @@
     poly = _build_cone_polygon(
         float(row["lat"]),
         float(row["lon"]),
-        float(row["cone_azimuth"]),
+        float(row["sequence_azimuth"]),
         float(row["cone_angle"]),
         float(r_km),
         float(r_min_km),
@@ -277,7 +277,7 @@
     Parameters
     ----------
     api_sequences : pd.DataFrame
-        Input with fields: id, lat, lon, cone_azimuth, cone_angle, is_wildfire,
+        Input with fields: id, lat, lon, sequence_azimuth, cone_angle, is_wildfire,
         started_at, last_seen_at.
     r_km : float
         Outer radius of the camera detection cone in kilometers.
diff --git a/src/tests/conftest.py b/src/tests/conftest.py
index 276360b0..5458216e 100644
--- a/src/tests/conftest.py
+++ b/src/tests/conftest.py
@@ -166,7 +166,7 @@
         "pose_id": 1,
         "azimuth": 43.7,
         "is_wildfire": "wildfire_smoke",
-        "cone_azimuth": 34.6,
+        "sequence_azimuth": 34.6,
         "cone_angle": 54.8,
         "started_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format),
         "last_seen_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format),
@@ -177,7 +177,7 @@
         "pose_id": 3,
         "azimuth": 74.8,
         "is_wildfire": None,
-        "cone_azimuth": 65.7,
+        "sequence_azimuth": 65.7,
         "cone_angle": 54.8,
         "started_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format),
         "last_seen_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format),
diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index b6b79304..15454a2d 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -26,7 +26,7 @@ async def _create_alert_with_sequences(
             "pose_id": None,
             "azimuth": 180.0,
             "is_wildfire": None,
-            "cone_azimuth": 163.4,
+            "sequence_azimuth": 163.4,
             "cone_angle": 1.0,
         },
         {
@@ -34,7 +34,7 @@ async def _create_alert_with_sequences(
             "pose_id": None,
             "azimuth": 25.0,
             "is_wildfire": None,
-            "cone_azimuth": 8.3,
+            "sequence_azimuth": 8.3,
             "cone_angle": 0.8,
         },
         {
@@ -42,7 +42,7 @@ async def _create_alert_with_sequences(
             "pose_id": None,
             "azimuth": 276.0,
             "is_wildfire": None,
-            "cone_azimuth": 276.5,
+            "sequence_azimuth": 276.5,
             "cone_angle": 3.0,
         },
     ]
@@ -147,9 +147,9 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
     now = datetime.utcnow()
     camera_id = 1
     seq_specs = [
-        {"azimuth": 0.0, "cone_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
-        {"azimuth": 5.0, "cone_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
-        {"azimuth": 350.0, "cone_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
+        {"azimuth": 0.0, "sequence_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
+        {"azimuth": 5.0, "sequence_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
+        {"azimuth": 350.0, "sequence_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
     ]
     seqs: List[Sequence] = []
     for spec in seq_specs:
         seq = Sequence(
             camera_id=camera_id,
             pose_id=None,
             azimuth=spec["azimuth"],
             is_wildfire=None,
-            cone_azimuth=spec["cone_azimuth"],
+            sequence_azimuth=spec["sequence_azimuth"],
             cone_angle=spec["cone_angle"],
             started_at=now - timedelta(seconds=10 + spec["offset"]),
             last_seen_at=now - timedelta(seconds=spec["offset"]),
@@ -177,7 +177,7 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
             "id": seq.id,
             "lat": camera.lat,
             "lon": camera.lon,
-            "cone_azimuth": seq.cone_azimuth,
+            "sequence_azimuth": seq.sequence_azimuth,
             "cone_angle": seq.cone_angle,
             "is_wildfire": seq.is_wildfire,
             "started_at": seq.started_at,
@@ -259,7 +259,7 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
             "id": seq.id,
             "lat": camera.lat,
             "lon": camera.lon,
-            "cone_azimuth": seq.cone_azimuth,
+            "sequence_azimuth": seq.sequence_azimuth,
             "cone_angle": seq.cone_angle,
             "is_wildfire": seq.is_wildfire,
             "started_at": seq.started_at,
diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py
index 8ac84bd0..f4141da0 100644
--- a/src/tests/endpoints/test_detections.py
+++ b/src/tests/endpoints/test_detections.py
@@ -285,16 +285,16 @@ async def test_create_detection_creates_sequence(
     seq_res = await detection_session.get(Sequence, data["sequence_id"])
     assert seq_res is not None
-    assert seq_res.cone_azimuth is not None
+    assert seq_res.sequence_azimuth is not None
     assert seq_res.cone_angle is not None
     camera = await detection_session.get(Camera, pytest.camera_table[0]["id"])
     assert camera is not None
-    expected_cone_azimuth, expected_cone_angle = resolve_cone(
+    expected_sequence_azimuth, expected_cone_angle = resolve_cone(
         float(payload["azimuth"] if payload["azimuth"] is not None else 0.0),
         str(payload["bboxes"]),
         camera.angle_of_view,
     )
-    assert seq_res.cone_azimuth == pytest.approx(expected_cone_azimuth)
+    assert seq_res.sequence_azimuth == pytest.approx(expected_sequence_azimuth)
     assert seq_res.cone_angle == pytest.approx(expected_cone_angle)
     # Detection references the sequence
     det_res = await detection_session.get(Detection, data["id"])
@@ -330,7 +330,7 @@ async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSe
         camera_id=cam1.id,
         pose_id=None,
         azimuth=0.0,
-        cone_azimuth=0.0,
+        sequence_azimuth=0.0,
         cone_angle=90.0,
         is_wildfire=None,
         started_at=now - timedelta(seconds=30),
@@ -340,7 +340,7 @@ async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSe
         camera_id=cam2.id,
         pose_id=None,
         azimuth=5.0,
-        cone_azimuth=5.0,
+        sequence_azimuth=5.0,
         cone_angle=90.0,
         is_wildfire=None,
         started_at=now - timedelta(seconds=25),
diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py
index 220727e8..e3953bde 100644
--- a/src/tests/endpoints/test_sequences.py
+++ b/src/tests/endpoints/test_sequences.py
@@ -182,7 +182,7 @@ async def test_fetch_sequences_from_date(
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
         assert response.json() == expected_result
-        assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json())
+        assert all(isinstance(elt["sequence_azimuth"], float) for elt in response.json())
         assert all(isinstance(elt["cone_angle"], float) for elt in response.json())
@@ -219,7 +219,7 @@ async def test_latest_sequences(
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
         assert response.json() == expected_result
-        assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json())
+        assert all(isinstance(elt["sequence_azimuth"], float) for elt in response.json())
         assert all(isinstance(elt["cone_angle"], float) for elt in response.json())
@@ -233,7 +233,7 @@ async def test_sequence_label_updates_alerts(async_client: AsyncClient, detectio
         camera_id=camera.id,
         pose_id=None,
         azimuth=180.0,
-        cone_azimuth=170.0,
+        sequence_azimuth=170.0,
         cone_angle=5.0,
         is_wildfire=None,
         started_at=now - timedelta(seconds=30),
@@ -243,7 +243,7 @@ async def test_sequence_label_updates_alerts(async_client: AsyncClient, detectio
         camera_id=camera.id,
         pose_id=None,
         azimuth=182.0,
-        cone_azimuth=172.0,
+        sequence_azimuth=172.0,
         cone_angle=5.0,
         is_wildfire=None,
         started_at=now - timedelta(seconds=25),
@@ -320,7 +320,7 @@ async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncC
         camera_id=camera.id,
         pose_id=None,
         azimuth=45.0,
-        cone_azimuth=40.0,
+        sequence_azimuth=40.0,
         cone_angle=10.0,
         is_wildfire=None,
         started_at=now,
diff --git a/src/tests/services/test_overlap.py b/src/tests/services/test_overlap.py
index 1298ed10..c8f95c76 100644
--- a/src/tests/services/test_overlap.py
+++ b/src/tests/services/test_overlap.py
@@ -14,7 +14,7 @@ def _make_sequence(
     id_: int,
     lat: float,
     lon: float,
-    cone_azimuth: float,
+    sequence_azimuth: float,
     cone_angle: float,
     started_at: datetime,
     last_seen_at: datetime,
@@ -24,7 +24,7 @@
         "id": id_,
         "lat": lat,
         "lon": lon,
-        "cone_azimuth": cone_azimuth,
+        "sequence_azimuth": sequence_azimuth,
         "cone_angle": cone_angle,
         "is_wildfire": is_wildfire,
         "started_at": started_at,

From a50fca88e3acf91864e4cb18704d82c8bb733ef9 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 09:43:47 +0100
Subject: [PATCH 39/55] rename sequence azimuth to camera_azimuth

---
 src/app/api/api_v1/endpoints/detections.py | 4 ++--
 src/app/models.py | 2 +-
 src/tests/conftest.py | 4 ++--
 src/tests/endpoints/test_alerts.py | 14 +++++++-------
 src/tests/endpoints/test_detections.py | 4 ++--
 src/tests/endpoints/test_sequences.py | 6 +++---
 6 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 321f3358..9ad68029 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -218,7 +218,7 @@ async def create_detection(
     # Sequence handling
     # Check if there is a sequence that was seen recently
     sequence = await sequences.fetch_all(
-        filters=[("camera_id", token_payload.sub), ("azimuth", det.azimuth)],
+        filters=[("camera_id", token_payload.sub), ("camera_azimuth", det.azimuth)],
         inequality_pair=(
             "last_seen_at",
             ">",
@@ -255,7 +255,7 @@ async def create_detection(
                 Sequence(
                     camera_id=token_payload.sub,
                     pose_id=pose_id,
-                    azimuth=det.azimuth,
+                    camera_azimuth=det.azimuth,
                     sequence_azimuth=cone_azimuth,
                     cone_angle=cone_angle,
                     started_at=dets_[0].created_at,
diff --git a/src/app/models.py b/src/app/models.py
index b36aa16c..71529bf9 100644
--- a/src/app/models.py
+++ b/src/app/models.py
@@ -84,7 +84,7 @@ class Sequence(SQLModel, table=True):
     id: int = Field(None, primary_key=True)
     camera_id: int = Field(..., foreign_key="cameras.id", nullable=False)
     pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True)
-    azimuth: float = Field(..., ge=0, lt=360)
+    camera_azimuth: float = Field(..., ge=0, lt=360)
     is_wildfire: Union[AnnotationType, None] = None
     sequence_azimuth: Union[float, None] = Field(None, nullable=True)
     cone_angle: Union[float, None] = Field(None, nullable=True)
     started_at: datetime = Field(..., nullable=False)
     last_seen_at: datetime = Field(..., nullable=False)
diff --git a/src/tests/conftest.py b/src/tests/conftest.py
index 5458216e..b583b686 100644
--- a/src/tests/conftest.py
+++ b/src/tests/conftest.py
@@ -164,7 +164,7 @@
         "id": 1,
         "camera_id": 1,
         "pose_id": 1,
-        "azimuth": 43.7,
+        "camera_azimuth": 43.7,
         "is_wildfire": "wildfire_smoke",
         "sequence_azimuth": 34.6,
         "cone_angle": 54.8,
@@ -175,7 +175,7 @@
         "id": 2,
         "camera_id": 2,
         "pose_id": 3,
-        "azimuth": 74.8,
+        "camera_azimuth": 74.8,
         "is_wildfire": None,
         "sequence_azimuth": 65.7,
         "cone_angle": 54.8,
diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index 15454a2d..44a96129 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -24,7 +24,7 @@ async def _create_alert_with_sequences(
         {
             "camera_id": camera_id,
             "pose_id": None,
-            "azimuth": 180.0,
+            "camera_azimuth": 180.0,
             "is_wildfire": None,
             "sequence_azimuth": 163.4,
             "cone_angle": 1.0,
@@ -32,7 +32,7 @@ async def _create_alert_with_sequences(
         {
             "camera_id": camera_id,
             "pose_id": None,
-            "azimuth": 25.0,
+            "camera_azimuth": 25.0,
             "is_wildfire": None,
             "sequence_azimuth": 8.3,
             "cone_angle": 0.8,
@@ -40,7 +40,7 @@ async def _create_alert_with_sequences(
         {
             "camera_id": camera_id,
             "pose_id": None,
-            "azimuth": 276.0,
+            "camera_azimuth": 276.0,
             "is_wildfire": None,
             "sequence_azimuth": 276.5,
             "cone_angle": 3.0,
@@ -147,16 +147,16 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
     now = datetime.utcnow()
     camera_id = 1
     seq_specs = [
-        {"azimuth": 0.0, "sequence_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
-        {"azimuth": 5.0, "sequence_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
-        {"azimuth": 350.0, "sequence_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
+        {"camera_azimuth": 0.0, "sequence_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
+        {"camera_azimuth": 5.0, "sequence_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
+        {"camera_azimuth": 350.0, "sequence_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
     ]
     seqs: List[Sequence] = []
     for spec in seq_specs:
         seq = Sequence(
             camera_id=camera_id,
             pose_id=None,
-            azimuth=spec["azimuth"],
+            camera_azimuth=spec["camera_azimuth"],
             is_wildfire=None,
             sequence_azimuth=spec["sequence_azimuth"],
             cone_angle=spec["cone_angle"],
diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py
index f4141da0..42d5225c 100644
--- a/src/tests/endpoints/test_detections.py
+++ b/src/tests/endpoints/test_detections.py
@@ -329,7 +329,7 @@ async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSe
     seq1 = Sequence(
         camera_id=cam1.id,
         pose_id=None,
-        azimuth=0.0,
+        camera_azimuth=0.0,
         sequence_azimuth=0.0,
         cone_angle=90.0,
         is_wildfire=None,
@@ -339,7 +339,7 @@ async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSe
     seq2 = Sequence(
         camera_id=cam2.id,
         pose_id=None,
-        azimuth=5.0,
+        camera_azimuth=5.0,
         sequence_azimuth=5.0,
         cone_angle=90.0,
         is_wildfire=None,
diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py
index e3953bde..6ba3022e 100644
--- a/src/tests/endpoints/test_sequences.py
+++ b/src/tests/endpoints/test_sequences.py
@@ -232,7 +232,7 @@ async def test_sequence_label_updates_alerts(async_client: AsyncClient, detectio
     seq1 = Sequence(
         camera_id=camera.id,
         pose_id=None,
-        azimuth=180.0,
+        camera_azimuth=180.0,
         sequence_azimuth=170.0,
         cone_angle=5.0,
         is_wildfire=None,
@@ -242,7 +242,7 @@ async def test_sequence_label_updates_alerts(async_client: AsyncClient, detectio
     seq2 = Sequence(
         camera_id=camera.id,
         pose_id=None,
-        azimuth=182.0,
+        camera_azimuth=182.0,
         sequence_azimuth=172.0,
         cone_angle=5.0,
         is_wildfire=None,
@@ -319,7 +319,7 @@ async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncC
     seq = Sequence(
         camera_id=camera.id,
         pose_id=None,
-        azimuth=45.0,
+        camera_azimuth=45.0,
         sequence_azimuth=40.0,
         cone_angle=10.0,
         is_wildfire=None,
         started_at=now,

From 420ac018ee1c9d3c5175472d8bad73c05a2e753a Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 09:47:32 +0100
Subject: [PATCH 40/55] add AlertBase

---
 src/app/schemas/alerts.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/src/app/schemas/alerts.py b/src/app/schemas/alerts.py
index 6aa5da5c..292b632a 100644
--- a/src/app/schemas/alerts.py
+++ b/src/app/schemas/alerts.py
@@ -8,23 +8,25 @@
 from pydantic import BaseModel, Field

-__all__ = ["AlertCreate", "AlertRead", "AlertUpdate"]
+__all__ = ["AlertBase", "AlertCreate", "AlertRead", "AlertUpdate"]

-class AlertCreate(BaseModel):
-    organization_id: int = Field(..., gt=0)
+class AlertBase(BaseModel):
+    organization_id: Optional[int] = Field(None, gt=0)
     lat: Optional[float] = None
     lon: Optional[float] = None
+    started_at: Optional[datetime] = None
+    last_seen_at: Optional[datetime] = None
+
+
+class AlertCreate(AlertBase):
+    organization_id: int = Field(..., gt=0)
     started_at: datetime
     last_seen_at: datetime

-class AlertUpdate(BaseModel):
-    organization_id: Optional[int] = Field(None, gt=0)
-    lat: Optional[float] = None
-    lon: Optional[float] = None
-    started_at: Optional[datetime] = None
-    last_seen_at: Optional[datetime] = None
+class AlertUpdate(AlertBase):
+    pass

 class AlertRead(AlertCreate):

From 3c671244b42f9c4cdb1da1019f38773c20e42bdb Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 11:02:59 +0100
Subject: [PATCH 41/55] adapt e2e

---
 scripts/test_e2e.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/test_e2e.py b/scripts/test_e2e.py
index 18303c91..c17d3d31 100644
--- a/scripts/test_e2e.py
+++ b/scripts/test_e2e.py
@@ -149,7 +149,7 @@ def main(args):
     assert sequence["camera_id"] == cam_id
     assert sequence["started_at"] == response.json()["created_at"]
     assert sequence["last_seen_at"] > sequence["started_at"]
-    assert sequence["azimuth"] == response.json()["azimuth"]
+    assert sequence["camera_azimuth"] == response.json()["azimuth"]
     # Fetch the latest sequence
     assert len(api_request("get", f"{args.endpoint}/sequences/unlabeled/latest", agent_auth)) == 1
     # Fetch from date

From 06895c0dbcdb35e25cd54627c8d27c8f8fb420ad Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 11:18:46 +0100
Subject: [PATCH 42/55] new headers

---
 src/app/api/api_v1/endpoints/alerts.py | 2 +-
 src/app/api/api_v1/endpoints/sequences.py | 2 +-
 src/app/crud/crud_alert.py | 2 +-
 src/app/crud/crud_sequence.py | 2 +-
 src/app/schemas/alerts.py | 2 +-
 src/app/services/cones.py | 2 +-
 src/app/services/overlap.py | 2 +-
 src/tests/endpoints/test_alerts.py | 2 +-
 src/tests/services/test_overlap.py | 2 +-
 9 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
index 476cc7a6..8b5d9e38 100644
--- a/src/app/api/api_v1/endpoints/alerts.py
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026-2026-2026-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index 252e2a0a..f11219b3 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025-2026, Pyronear.
+# Copyright (C) 2025-2026-2026-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/crud/crud_alert.py b/src/app/crud/crud_alert.py
index 0419be96..3b58fc8d 100644
--- a/src/app/crud/crud_alert.py
+++ b/src/app/crud/crud_alert.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/crud/crud_sequence.py b/src/app/crud/crud_sequence.py
index a501a24b..26463d79 100644
--- a/src/app/crud/crud_sequence.py
+++ b/src/app/crud/crud_sequence.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025-2026, Pyronear.
+# Copyright (C) 2025-2026-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/schemas/alerts.py b/src/app/schemas/alerts.py
index 292b632a..74b752f1 100644
--- a/src/app/schemas/alerts.py
+++ b/src/app/schemas/alerts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/services/cones.py b/src/app/services/cones.py
index 46e4bd55..67ea37e0 100644
--- a/src/app/services/cones.py
+++ b/src/app/services/cones.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py
index 0d99af4b..376ee25a 100644
--- a/src/app/services/overlap.py
+++ b/src/app/services/overlap.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2020-2025, Pyronear.
+# Copyright (C) 2020-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index 44a96129..6e6dade4 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 #
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/tests/services/test_overlap.py b/src/tests/services/test_overlap.py
index c8f95c76..5b216c5e 100644
--- a/src/tests/services/test_overlap.py
+++ b/src/tests/services/test_overlap.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 #
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.

From e71b4673f44b31b69a2c61eaf4edebf33b4e92a9 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 11:30:14 +0100
Subject: [PATCH 43/55] header fix

---
 src/app/api/api_v1/endpoints/alerts.py | 1 +
 src/app/api/api_v1/endpoints/sequences.py | 1 +
 src/app/crud/crud_sequence.py | 1 +
 3 files changed, 3 insertions(+)

diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
index 8b5d9e38..6eec4333 100644
--- a/src/app/api/api_v1/endpoints/alerts.py
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -3,6 +3,7 @@
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.

+
 from datetime import date, datetime, timedelta
 from typing import Any, List, Union, cast
diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index f11219b3..dc5b4c2d 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -3,6 +3,7 @@
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.

+
 from datetime import date, datetime, timedelta
 from typing import Any, List, Union, cast
diff --git a/src/app/crud/crud_sequence.py b/src/app/crud/crud_sequence.py
index 26463d79..90b96cda 100644
--- a/src/app/crud/crud_sequence.py
+++ b/src/app/crud/crud_sequence.py
@@ -3,6 +3,7 @@
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.

+
 from typing import Union

 from sqlmodel.ext.asyncio.session import AsyncSession

From 549ee5d15bc02f2717657e565211afa255924ce7 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sat, 3 Jan 2026 11:33:31 +0100
Subject: [PATCH 44/55] header fix

---
 src/app/api/api_v1/endpoints/alerts.py | 2 +-
 src/app/api/api_v1/endpoints/sequences.py | 2 +-
 src/app/crud/crud_sequence.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
index 6eec4333..79aa81c0 100644
--- a/src/app/api/api_v1/endpoints/alerts.py
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025-2026-2026-2026-2026, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py
index dc5b4c2d..f8fa5b7a 100644
--- a/src/app/api/api_v1/endpoints/sequences.py
+++ b/src/app/api/api_v1/endpoints/sequences.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025-2026-2026-2026, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
diff --git a/src/app/crud/crud_sequence.py b/src/app/crud/crud_sequence.py
index 90b96cda..ed622e06 100644
--- a/src/app/crud/crud_sequence.py
+++ b/src/app/crud/crud_sequence.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2025-2026-2026, Pyronear.
+# Copyright (C) 2025-2026, Pyronear.
 # This program is licensed under the Apache License 2.0.
 # See LICENSE or go to for full license details.
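Context for the triangulation test added in the next patch: each sequence carries a line of sight (`sequence_azimuth`) and an opening angle (`cone_angle`) anchored at its camera's position, and `compute_overlap` intersects the projected cones of concurrent sequences to estimate the smoke location that ends up on the alert (`Alert.lat` / `Alert.lon`). Below is a rough standalone sketch of the two-camera bearing intersection behind this idea, using a flat-earth approximation; `intersect_bearings` is a hypothetical helper for illustration only, not the `compute_overlap` implementation (which buffers and intersects projected Shapely polygons rather than raw rays):

import math
from typing import Optional, Tuple


def intersect_bearings(
    lat1: float, lon1: float, az1: float,
    lat2: float, lon2: float, az2: float,
) -> Optional[Tuple[float, float]]:
    # Hypothetical sketch: cross two lines of sight on a local flat-earth
    # approximation (reasonable for camera spacings of a few tens of km).
    kx = 111.32 * math.cos(math.radians(lat1))  # km per degree of longitude
    ky = 110.57  # km per degree of latitude
    # Camera 2 position in km, relative to camera 1
    x2, y2 = (lon2 - lon1) * kx, (lat2 - lat1) * ky
    # Azimuths are clockwise from north -> unit vectors in (x=east, y=north)
    d1 = (math.sin(math.radians(az1)), math.cos(math.radians(az1)))
    d2 = (math.sin(math.radians(az2)), math.cos(math.radians(az2)))
    denom = d1[0] * d2[1] - d1[1] * d2[0]
    if abs(denom) < 1e-9:  # parallel lines of sight: no usable fix
        return None
    # Solve t * d1 = (x2, y2) + s * d2 for the two ray parameters (in km)
    t = (x2 * d2[1] - y2 * d2[0]) / denom
    s = (x2 * d1[1] - y2 * d1[0]) / denom
    if t < 0 or s < 0:  # crossing point lies behind one of the cameras
        return None
    return lat1 + t * d1[1] / ky, lon1 + t * d1[0] / kx

Working with cones rather than rays is what makes the grouping robust: once the bbox offset from `resolve_cone` is applied, two lines of sight can be parallel or even diverge while both cameras still see the same smoke, yet their widened cones continue to overlap. The single alert asserted by the test corresponds to one connected group of overlapping cones.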
From 7a9b17ace34b8ddf13083e321009daee8e7a577c Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sun, 4 Jan 2026 11:03:18 +0100
Subject: [PATCH 45/55] add triangulation test

---
 src/tests/endpoints/test_alerts.py | 164 ++++++++++++++++++++++++++++-
 1 file changed, 163 insertions(+), 1 deletion(-)

diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index 6e6dade4..52bf1c02 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -12,7 +12,8 @@
 from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession

-from app.models import Alert, AlertSequence, AnnotationType, Camera, Sequence
+from app.core.config import settings
+from app.models import Alert, AlertSequence, AnnotationType, Camera, Organization, Sequence
 from app.services.overlap import compute_overlap

@@ -274,3 +275,164 @@ async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient,
     if remaining_loc:
         assert updated_alert.lat == pytest.approx(remaining_loc[0])
         assert updated_alert.lon == pytest.approx(remaining_loc[1])
+
+
+@pytest.mark.asyncio
+async def test_triangulation_creates_single_alert(
+    async_client: AsyncClient, detection_session: AsyncSession, mock_img: bytes
+):
+    organization = await detection_session.get(Organization, 1)
+    assert organization is not None
+    organization.name = "sdis-77"
+    detection_session.add(organization)
+    await detection_session.commit()
+    await detection_session.refresh(organization)
+
+    camera_specs = [
+        {
+            "name": "croix-augas",
+            "lat": 48.4267,
+            "lon": 2.7109,
+            "azimuth": 190.0,
+            "bboxes": "[(0,0.530,0.018,0.553,0.183)]",
+        },
+        {
+            "name": "nemours",
+            "lat": 48.2605,
+            "lon": 2.7064,
+            "azimuth": 25.0,
+            "bboxes": "[(0.184,0.425,0.199,0.447,0.557)]",
+        },
+        {
+            "name": "moret-sur-loing",
+            "lat": 48.3792,
+            "lon": 2.8208,
+            "azimuth": 280.0,
+            "bboxes": "[(0.408,0.462,0.463,0.496,0.498)]",
+        },
+    ]
+    cameras: List[Camera] = []
+    for spec in camera_specs:
+        camera = Camera(
+            organization_id=organization.id,
+            name=spec["name"],
+            angle_of_view=54.2,
+            elevation=110.0,
+            lat=spec["lat"],
+            lon=spec["lon"],
+            is_trustable=True,
+        )
+        detection_session.add(camera)
+        cameras.append(camera)
+    await detection_session.commit()
+    for camera in cameras:
+        await detection_session.refresh(camera)
+
+    for _ in range(settings.SEQUENCE_MIN_INTERVAL_DETS):
+        for camera, spec in zip(cameras, camera_specs, strict=False):
+            auth = pytest.get_token(camera.id, ["camera"], organization.id)
+            response = await async_client.post(
+                "/detections",
+                data={"azimuth": spec["azimuth"], "bboxes": spec["bboxes"]},
+                files={"file": ("logo.png", mock_img, "image/png")},
+                headers=auth,
+            )
+            assert response.status_code == 201, response.text
+
+    camera_ids = [camera.id for camera in cameras]
+    seqs_res = await detection_session.exec(
+        select(Sequence).where(cast(Any, Sequence.camera_id).in_(camera_ids)).execution_options(populate_existing=True)
+    )
+    sequences = sorted(seqs_res.all(), key=lambda seq: seq.id)
+    assert len(sequences) == len(cameras)
+
+    seq_ids = {seq.id for seq in sequences}
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).where(
+            cast(Any, AlertSequence.sequence_id).in_(list(seq_ids))
+        )
+    )
+    mappings = set(mappings_res.all())
+    alert_ids = {aid for aid, _ in mappings}
+    assert len(alert_ids) == 1
+    assert {sid for _, sid in mappings} == seq_ids
+
+    alert_res = await detection_session.exec(select(Alert).where(Alert.id == next(iter(alert_ids))))
+    alert = alert_res.one()
+    assert alert.organization_id == organization.id
+
+    camera_by_id = {camera.id: camera for camera in cameras}
+    records = [
+        {
+            "id": seq.id,
+            "lat": camera_by_id[seq.camera_id].lat,
+            "lon": camera_by_id[seq.camera_id].lon,
+            "sequence_azimuth": seq.sequence_azimuth,
+            "cone_angle": seq.cone_angle,
+            "is_wildfire": seq.is_wildfire,
+            "started_at": seq.started_at,
+            "last_seen_at": seq.last_seen_at,
+        }
+        for seq in sequences
+    ]
+    df = compute_overlap(pd.DataFrame.from_records(records))
+    expected_loc = None
+    for groups, locations in zip(df["event_groups"], df["event_smoke_locations"], strict=False):
+        for idx, group in enumerate(groups):
+            if set(group) == seq_ids:
+                if idx < len(locations):
+                    expected_loc = locations[idx]
+                break
+        if expected_loc is not None:
+            break
+
+    assert expected_loc is not None
+    assert alert.lat == pytest.approx(expected_loc[0])
+    assert alert.lon == pytest.approx(expected_loc[1])
+
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
+    initial_alert_id = next(iter(alert_ids))
+    initial_mappings = set(mappings)
+
+    resp = await async_client.patch(
+        f"/sequences/{sequences[0].id}/label",
+        json={"is_wildfire": AnnotationType.WILDFIRE_SMOKE.value},
+        headers=auth,
+    )
+    assert resp.status_code == 200, resp.text
+
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).where(
+            cast(Any, AlertSequence.sequence_id).in_(list(seq_ids))
+        )
+    )
+    mappings_after_wildfire = set(mappings_res.all())
+    alert_ids_after_wildfire = {aid for aid, _ in mappings_after_wildfire}
+    assert alert_ids_after_wildfire == {initial_alert_id}
+    assert mappings_after_wildfire == initial_mappings
+
+    resp = await async_client.patch(
+        f"/sequences/{sequences[1].id}/label",
+        json={"is_wildfire": AnnotationType.OTHER_SMOKE.value},
+        headers=auth,
+    )
+    assert resp.status_code == 200, resp.text
+
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).where(
+            cast(Any, AlertSequence.sequence_id).in_(list(seq_ids))
+        )
+    )
+    mappings_after_other = set(mappings_res.all())
+    alert_ids_after_other = {aid for aid, _ in mappings_after_other}
+    assert len(alert_ids_after_other) == 2
+    new_alert_ids = alert_ids_after_other - {initial_alert_id}
+    assert len(new_alert_ids) == 1
+    new_alert_id = next(iter(new_alert_ids))
+
+    assert {sid for aid, sid in mappings_after_other if aid == new_alert_id} == {sequences[1].id}
+    remaining_ids = {seq.id for seq in sequences if seq.id != sequences[1].id}
+    updated_mappings = {(aid, sid) for aid, sid in mappings_after_other if aid == initial_alert_id}
+    assert updated_mappings == {(initial_alert_id, sid) for sid in remaining_ids}

From 54c2ec50602f62ae7a1bdeec7905313f5e926e4e Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sun, 4 Jan 2026 11:09:56 +0100
Subject: [PATCH 46/55] drop duplicate

---
 src/tests/endpoints/test_alerts.py | 135 -----------------------------
 1 file changed, 135 deletions(-)

diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py
index 52bf1c02..534fc4b2 100644
--- a/src/tests/endpoints/test_alerts.py
+++ b/src/tests/endpoints/test_alerts.py
@@ -142,141 +142,6 @@ async def test_alerts_from_date(async_client: AsyncClient, detection_session: As
     assert started_times == sorted(started_times, reverse=True)

-@pytest.mark.asyncio
-async def test_alert_recompute_after_sequence_relabel(async_client: AsyncClient, detection_session: AsyncSession):
-    # Build three overlapping sequences on the same camera
-    now = datetime.utcnow()
-    camera_id = 1
-    seq_specs = [
-        {"camera_azimuth": 0.0, "sequence_azimuth": 0.0, "cone_angle": 20.0, "offset": 2},
-        {"camera_azimuth": 5.0, "sequence_azimuth": 5.0, "cone_angle": 20.0, "offset": 1},
-        {"camera_azimuth": 350.0, "sequence_azimuth": 350.0, "cone_angle": 20.0, "offset": 0},
-    ]
-    seqs: List[Sequence] = []
-    for spec in seq_specs:
-        seq = Sequence(
-            camera_id=camera_id,
-            pose_id=None,
-            camera_azimuth=spec["camera_azimuth"],
-            is_wildfire=None,
-            sequence_azimuth=spec["sequence_azimuth"],
-            cone_angle=spec["cone_angle"],
-            started_at=now - timedelta(seconds=10 + spec["offset"]),
-            last_seen_at=now - timedelta(seconds=spec["offset"]),
-        )
-        detection_session.add(seq)
-        seqs.append(seq)
-    await detection_session.commit()
-    for seq in seqs:
-        await detection_session.refresh(seq)
-
-    # Compute initial alert location from all three
-    camera = await detection_session.get(Camera, camera_id)
-    assert camera is not None
-    records = [
-        {
-            "id": seq.id,
-            "lat": camera.lat,
-            "lon": camera.lon,
-            "sequence_azimuth": seq.sequence_azimuth,
-            "cone_angle": seq.cone_angle,
-            "is_wildfire": seq.is_wildfire,
-            "started_at": seq.started_at,
-            "last_seen_at": seq.last_seen_at,
-        }
-        for seq in seqs
-    ]
-    df_all = compute_overlap(pd.DataFrame.from_records(records))
-    initial_loc = next(
-        (loc for locs in df_all["event_smoke_locations"].tolist() for loc in locs if loc is not None), None
-    )
-
-    alert = Alert(
-        organization_id=1,
-        lat=initial_loc[0] if initial_loc else None,
-        lon=initial_loc[1] if initial_loc else None,
-        started_at=min(seq.started_at for seq in seqs),
-        last_seen_at=max(seq.last_seen_at for seq in seqs),
-    )
-    detection_session.add(alert)
-    await detection_session.commit()
-    await detection_session.refresh(alert)
-
-    for seq in seqs:
-        detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id))
-    await detection_session.commit()
-
-    auth = pytest.get_token(
-        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
-    )
-
-    # Relabel one sequence as non-wildfire -> should be detached, new alert created, old alert recomputed
-    target_seq = seqs[-1]
-    resp = await async_client.patch(
-        f"/sequences/{target_seq.id}/label",
-        json={"is_wildfire": AnnotationType.OTHER_SMOKE.value},
-        headers=auth,
-    )
-    assert resp.status_code == 200, resp.text
-
-    # Fetch alerts and mappings with fresh values
-    alerts_res = await detection_session.exec(select(Alert).execution_options(populate_existing=True))
-    alerts_in_db = alerts_res.all()
-    assert len(alerts_in_db) == 2
-
-    mappings_res = await detection_session.exec(
-        select(AlertSequence.alert_id, AlertSequence.sequence_id).execution_options(populate_existing=True)
-    )
-    mappings = mappings_res.all()
-
-    # Identify alerts
-    new_alert = next(a for a in alerts_in_db if a.id != alert.id)
-    updated_alert = next(a for a in alerts_in_db if a.id == alert.id)
-
-    # New alert should only reference relabeled sequence
-    assert (new_alert.id, target_seq.id) in mappings
-    assert updated_alert.id != new_alert.id
-
-    # Updated alert should only reference remaining sequences
-    remaining_ids = {seqs[0].id, seqs[1].id}
-    remaining_mappings = {(aid, sid) for aid, sid in mappings if aid == updated_alert.id}
-    assert remaining_mappings == {(updated_alert.id, sid) for sid in remaining_ids}
-
-    # Reload remaining sequences from DB to compare times
-    remaining_seqs_res = await detection_session.exec(
-        select(Sequence)
-        .where(cast(Any, Sequence.id).in_(list(remaining_ids)))
-        .execution_options(populate_existing=True)
-    )
-    remaining_seqs = remaining_seqs_res.all()
-
-    # Updated alert times recomputed
-    assert updated_alert.started_at == min(seq.started_at for seq in remaining_seqs)
-    assert updated_alert.last_seen_at == max(seq.last_seen_at for seq in remaining_seqs)
-
-    # Updated alert location recomputed from remaining sequences
-    remaining_records = [
-        {
-            "id": seq.id,
-            "lat": camera.lat,
-            "lon": camera.lon,
-            "sequence_azimuth": seq.sequence_azimuth,
-            "cone_angle": seq.cone_angle,
-            "is_wildfire": seq.is_wildfire,
-            "started_at": seq.started_at,
-            "last_seen_at": seq.last_seen_at,
-        }
-        for seq in remaining_seqs
-    ]
-    df_remaining = compute_overlap(pd.DataFrame.from_records(remaining_records))
-    remaining_loc = next(
-        (loc for locs in df_remaining["event_smoke_locations"].tolist() for loc in locs if loc is not None), None
-    )
-    if remaining_loc:
-        assert updated_alert.lat == pytest.approx(remaining_loc[0])
-        assert updated_alert.lon == pytest.approx(remaining_loc[1])
-
-
 @pytest.mark.asyncio
 async def test_triangulation_creates_single_alert(

From 0b80c10fb0d72004e0c66141b917017dfad95bc5 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sun, 28 Dec 2025 10:53:59 +0100
Subject: [PATCH 47/55] use pose_id

---
 client/pyroclient/client.py | 14 +++---
 src/app/api/api_v1/endpoints/detections.py | 39 +++++++++--------
 src/app/models.py | 1 -
 src/app/schemas/detections.py | 14 +-----
 src/tests/conftest.py | 4 --
 src/tests/endpoints/test_detections.py | 51 ++++++++--------------
 6 files changed, 46 insertions(+), 77 deletions(-)

diff --git a/client/pyroclient/client.py b/client/pyroclient/client.py
index 800d91f3..fb784a15 100644
--- a/client/pyroclient/client.py
+++ b/client/pyroclient/client.py
@@ -4,7 +4,7 @@
 # See LICENSE or go to for full license details.

 from enum import Enum
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Tuple
 from urllib.parse import urljoin

 import requests
@@ -215,22 +215,20 @@ def delete_pose(self, pose_id: int) -> Response:
     def create_detection(
         self,
         media: bytes,
-        azimuth: float,
         bboxes: List[Tuple[float, float, float, float, float]],
-        pose_id: Optional[int] = None,
+        pose_id: int,
     ) -> Response:
         """Notify the detection of a wildfire on the picture taken by a camera.

         >>> from pyroclient import Client
         >>> api_client = Client("MY_CAM_TOKEN")
         >>> with open("path/to/my/file.ext", "rb") as f: data = f.read()
-        >>> response = api_client.create_detection(data, azimuth=124.2, bboxes=[(.1,.1,.5,.8,.5)], pose_id=12)
+        >>> response = api_client.create_detection(data, bboxes=[(.1,.1,.5,.8,.5)], pose_id=12)

         Args:
             media: byte data of the picture
-            azimuth: the azimuth of the camera when the picture was taken
             bboxes: list of tuples where each tuple is a relative coordinate in order xmin, ymin, xmax, ymax, conf
-            pose_id: optional, pose_id of the detection
+            pose_id: pose_id of the detection

         Returns:
             HTTP response
         """
         if not isinstance(bboxes, (list, tuple)) or len(bboxes) == 0 or len(bboxes) > 5:
             raise ValueError("bboxes must be a non-empty list of tuples with a maximum of 5 boxes")
         data = {
-            "azimuth": azimuth,
             "bboxes": _dump_bbox_to_json(bboxes),
         }
-        if pose_id is not None:
-            data["pose_id"] = pose_id
+        data["pose_id"] = pose_id
         return requests.post(
             urljoin(self._route_prefix, ClientRoute.DETECTIONS_CREATE),
             headers=self.headers,
diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 1053d507..1bdc2d16 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -29,12 +29,13 @@
     get_detection_crud,
     get_jwt,
     get_organization_crud,
+    get_pose_crud,
     get_sequence_crud,
     get_webhook_crud,
 )
 from app.core.config import settings
-from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD
-from app.models import Alert, AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole
+from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, PoseCRUD, SequenceCRUD, WebhookCRUD
+from app.models import Alert, AlertSequence, Camera, Detection, Organization, Pose, Role, Sequence, UserRole
 from app.schemas.alerts import AlertCreate, AlertUpdate
 from app.schemas.detections import (
     BOXES_PATTERN,
@@ -188,12 +189,12 @@ async def create_detection(
         min_length=2,
         max_length=settings.MAX_BBOX_STR_LENGTH,
     ),
-    azimuth: float = Form(..., ge=0, lt=360, description="angle between north and direction in degrees"),
-    pose_id: Optional[int] = Form(None, gt=0, description="pose id of the detection"),
+    pose_id: int = Form(..., gt=0, description="pose id of the detection"),
     file: UploadFile = File(..., alias="file"),
     detections: DetectionCRUD = Depends(get_detection_crud),
     webhooks: WebhookCRUD = Depends(get_webhook_crud),
     organizations: OrganizationCRUD = Depends(get_organization_crud),
+    poses: PoseCRUD = Depends(get_pose_crud),
     sequences: SequenceCRUD = Depends(get_sequence_crud),
     alerts: AlertCRUD = Depends(get_alert_crud),
     cameras: CameraCRUD = Depends(get_camera_crud),
@@ -210,20 +211,22 @@ async def create_detection(
     # Upload media
     bucket_key = await upload_file(file, token_payload.organization_id, token_payload.sub)

+    pose = cast(Pose, await poses.get(pose_id, strict=True))
+    if pose.camera_id != token_payload.sub:
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.")
+
     det = await detections.create(
         DetectionCreate(
-            camera_id=token_payload.sub, pose_id=pose_id, bucket_key=bucket_key, azimuth=azimuth, bboxes=bboxes
+            camera_id=token_payload.sub, pose_id=pose_id, bucket_key=bucket_key, bboxes=bboxes
         )
     )

     # Sequence handling
     # Check if there is a sequence that was seen recently
+    seq_filters: List[tuple[str, Any]] = [("camera_id", token_payload.sub), ("pose_id", pose_id)]
+
     sequence = await sequences.fetch_all(
-        filters=[("camera_id", token_payload.sub), ("camera_azimuth", det.azimuth)],
-        inequality_pair=(
-            "last_seen_at",
-            ">",
-            datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS),
-        ),
+        filters=seq_filters,
+        inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS)),
         order_by="last_seen_at",
         order_desc=True,
         limit=1,
@@ -235,13 +238,11 @@ async def create_detection(
         await detections.update(det.id, DetectionSequence(sequence_id=sequence[0].id))
     else:
         # Check if we've reached the threshold of detections per interval
+        det_filters: List[tuple[str, Any]] = [("camera_id", token_payload.sub), ("pose_id", pose_id)]
+
         dets_ = await detections.fetch_all(
-            filters=[("camera_id", token_payload.sub), ("azimuth", det.azimuth)],
-            inequality_pair=(
-                "created_at",
-                ">",
-                datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_MIN_INTERVAL_SECONDS),
-            ),
+            filters=det_filters,
+            inequality_pair=("created_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_MIN_INTERVAL_SECONDS)),
             order_by="created_at",
             order_desc=False,
             limit=settings.SEQUENCE_MIN_INTERVAL_DETS,
@@ -249,13 +250,13 @@ async def create_detection(

         if len(dets_) >= settings.SEQUENCE_MIN_INTERVAL_DETS:
             camera = cast(Camera, await cameras.get(det.camera_id, strict=True))
-            cone_azimuth, cone_angle = resolve_cone(det.azimuth, dets_[0].bboxes, camera.angle_of_view)
+            cone_azimuth, cone_angle = resolve_cone(pose.azimuth, dets_[0].bboxes, camera.angle_of_view)
             # Create new sequence
             sequence_ = await sequences.create(
                 Sequence(
                     camera_id=token_payload.sub,
                     pose_id=pose_id,
-                    camera_azimuth=det.azimuth,
+                    camera_azimuth=pose.azimuth,
                     sequence_azimuth=cone_azimuth,
                     cone_angle=cone_angle,
                     started_at=dets_[0].created_at,
diff --git a/src/app/models.py b/src/app/models.py
index 301e19ab..02d187a5 100644
--- a/src/app/models.py
+++ b/src/app/models.py
@@ -73,7 +73,6 @@ class Detection(SQLModel, table=True):
     camera_id: int = Field(..., foreign_key="cameras.id", nullable=False)
     pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True)
     sequence_id: Union[int, None] = Field(None, foreign_key="sequences.id", nullable=True)
-    azimuth: float = Field(..., ge=0, lt=360)
     bucket_key: str
     bboxes: str = Field(..., min_length=2, max_length=settings.MAX_BBOX_STR_LENGTH, nullable=False)
     created_at: datetime = Field(default_factory=datetime.utcnow, nullable=False)
diff --git a/src/app/schemas/detections.py b/src/app/schemas/detections.py
index 39c337b9..d6fb5a54 100644
--- a/src/app/schemas/detections.py
+++ b/src/app/schemas/detections.py
@@ -11,23 +11,13 @@
 from app.core.config import settings
 from app.models import AnnotationType, Detection

-__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionRead", "DetectionUrl", "DetectionWithUrl"]
+__all__ = ["DetectionCreate", "DetectionLabel", "DetectionRead", "DetectionUrl", "DetectionWithUrl"]

 class DetectionLabel(BaseModel):
     is_wildfire: AnnotationType

-class Azimuth(BaseModel):
-    azimuth: float = Field(
-        ...,
-        ge=0,
-        lt=360,
-        description="angle between north and direction in degrees",
-        json_schema_extra={"examples": [110]},
-    )
-
-
 # Regex for a float between 0 and 1, with a maximum of 3 decimals
 FLOAT_PATTERN = r"(0?\.[0-9]{1,3}|0|1)"
 BOX_PATTERN = rf"\({FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN},{FLOAT_PATTERN}\)"
@@ -35,7 +25,7 @@ class Azimuth(BaseModel):
 COMPILED_BOXES_PATTERN = re.compile(BOXES_PATTERN)

-class DetectionCreate(Azimuth):
+class DetectionCreate(BaseModel):
     camera_id: int = Field(..., gt=0)
     pose_id: Optional[int] = Field(None, gt=0)
     bucket_key: str
diff --git a/src/tests/conftest.py b/src/tests/conftest.py
index b583b686..b3b781f4 100644
--- a/src/tests/conftest.py
+++ b/src/tests/conftest.py
@@ -122,7 +122,6 @@
         "camera_id": 1,
         "pose_id": 1,
         "sequence_id": 1,
-        "azimuth": 43.7,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
         "created_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format),
@@ -132,7 +131,6 @@
         "camera_id": 1,
         "pose_id": 1,
         "sequence_id": 1,
-        "azimuth": 43.7,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
         "created_at": datetime.strptime("2023-11-07T15:18:19.226673", dt_format),
@@ -142,7 +140,6 @@
         "camera_id": 1,
         "pose_id": 1,
         "sequence_id": 1,
-        "azimuth": 43.7,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
         "created_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format),
@@ -152,7 +149,6 @@
         "camera_id": 2,
         "pose_id": 3,
         "sequence_id": 2,
-        "azimuth": 74.8,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
         "created_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format),
diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py
index 42d5225c..a59b30aa 100644
--- a/src/tests/endpoints/test_detections.py
+++ b/src/tests/endpoints/test_detections.py
@@ -9,35 +9,25 @@
 from app.api.api_v1.endpoints.detections import _attach_sequence_to_alert
 from app.core.config import settings
 from app.crud import AlertCRUD, CameraCRUD, SequenceCRUD
-from app.models import AlertSequence, Camera, Detection, Sequence
+from app.models import AlertSequence, Camera, Detection, Pose, Sequence
 from app.services.cones import resolve_cone

 @pytest.mark.parametrize(
     ("user_idx", "cam_idx", "payload", "status_code", "status_detail", "repeat"),
     [
-        (None, None, {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 401, "Not authenticated", None),
-        (0, None, {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
-        (1, None, {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
-        (2, None, {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
-        (None, 0, {"azimuth": "hello"}, 422, None, None),
+        (None, None, {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 401, "Not authenticated", None),
+        (0, None, {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
+        (1, None, {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
+        (2, None, {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None),
         (None, 0, {}, 422, None, None),
-        (None, 0, {"azimuth": 45.6, "bboxes": []}, 422, None, None),
-        (None, 1, {"azimuth": 45.6, "bboxes": (0.6, 0.6, 0.6, 0.6, 0.6)}, 422, None, None),
-        (None, 1, {"azimuth": 45.6, "bboxes": "[(0.6, 0.6, 0.6, 0.6, 0.6)]"}, 422, None, None),
-        (None, 1, {"azimuth": 360, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 422, None, None),
+        (None, 0, {"pose_id": 3, "bboxes": []}, 422, None, None),
+        (None, 1, {"pose_id": 3, "bboxes": (0.6, 0.6, 0.6, 0.6, 0.6)}, 422, None, None),
+        (None, 1, {"pose_id": 3, "bboxes": "[(0.6, 0.6, 0.6, 0.6, 0.6)]"}, 422, None, None),
         (
             None,
             1,
-            {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]", "pose_id": 3, "sequence_id": None},
-            201,
-            None,
-            0,
-        ),
-        (
-            None,
-            1,
-            {"azimuth": 0, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]", "pose_id": 3, "sequence_id": None},
+            {"pose_id": 3, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"},
             201,
             None,
             0,
@@ -46,7 +36,7 @@
         (
             None,
             1,
-            {"azimuth": 45.6, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]", "pose_id": 3, "sequence_id": None},
+            {"pose_id": 3, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"},
             201,
             None,
             2,
@@ -86,13 +76,11 @@ async def test_create_detection(
     if isinstance(status_detail, str):
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
-        assert {
-            k: v
-            for k, v in response.json().items()
-            if k not in {"created_at", "updated_at", "id", "bucket_key", "camera_id"}
-        } == payload
-        assert response.json()["id"] == max(entry["id"] for entry in pytest.detection_table) + 1
-        assert response.json()["camera_id"] == pytest.camera_table[cam_idx]["id"]
+        data = response.json()
+        assert data["pose_id"] == payload.get("pose_id")
+        assert data["bboxes"] == payload["bboxes"]
+        assert data["id"] == max(entry["id"] for entry in pytest.detection_table) + 1
+        assert data["camera_id"] == pytest.camera_table[cam_idx]["id"]
     if isinstance(repeat, int) and repeat > 0:
         det_ids = [response.json()["id"]]
         for _ in range(repeat):
@@ -272,8 +260,7 @@ async def test_create_detection_creates_sequence(
     mock_img = b"img"
     auth = pytest.get_token(pytest.camera_table[0]["id"], ["camera"], pytest.camera_table[0]["organization_id"])
     payload = {
-        "azimuth": 120.0,
-        "pose_id": None,
+        "pose_id": 1,
         "bboxes": "[(0.1,0.1,0.2,0.2,0.9)]",
     }
     resp = await async_client.post(
@@ -289,10 +276,10 @@ async def test_create_detection_creates_sequence(
     assert seq_res.cone_angle is not None
     camera = await detection_session.get(Camera, pytest.camera_table[0]["id"])
     assert camera is not None
+    pose = await detection_session.get(Pose, payload["pose_id"])
+    assert pose is not None
     expected_sequence_azimuth, expected_cone_angle = resolve_cone(
-        float(payload["azimuth"] if payload["azimuth"] is not None else 0.0),
-        str(payload["bboxes"]),
-        camera.angle_of_view,
+        pose.azimuth, str(payload["bboxes"]), camera.angle_of_view
     )
     assert seq_res.sequence_azimuth == pytest.approx(expected_sequence_azimuth)
     assert seq_res.cone_angle == pytest.approx(expected_cone_angle)

From 436b9f7e03abf2f3151d020277040782b4332da3 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sun, 28 Dec 2025 11:05:32 +0100
Subject: [PATCH 48/55] style

---
 src/app/api/api_v1/endpoints/detections.py | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py
index 1bdc2d16..11c6d780 100644
--- a/src/app/api/api_v1/endpoints/detections.py
+++ b/src/app/api/api_v1/endpoints/detections.py
@@ -5,7 +5,7 @@

 from datetime import datetime, timedelta
-from typing import Any, List, Optional, cast
+from typing import Any, List, cast

 import pandas as pd
 from fastapi import (
@@ -194,10 +194,10 @@ async def create_detection(
     detections: DetectionCRUD = Depends(get_detection_crud),
     webhooks: WebhookCRUD = Depends(get_webhook_crud),
     organizations: OrganizationCRUD = Depends(get_organization_crud),
-    poses: PoseCRUD = Depends(get_pose_crud),
     sequences: SequenceCRUD = Depends(get_sequence_crud),
     alerts: AlertCRUD = Depends(get_alert_crud),
     cameras: CameraCRUD = Depends(get_camera_crud),
+    poses: PoseCRUD = Depends(get_pose_crud),
     token_payload: TokenPayload = Security(get_jwt, scopes=[Role.CAMERA]),
 ) -> Detection:
     telemetry_client.capture(f"camera|{token_payload.sub}", event="detections-create")
@@ -226,7 +226,11 @@ async def
create_detection( sequence = await sequences.fetch_all( filters=seq_filters, - inequality_pair=("last_seen_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS)), + inequality_pair=( + "last_seen_at", + ">", + datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS), + ), order_by="last_seen_at", order_desc=True, limit=1, @@ -242,7 +246,11 @@ async def create_detection( dets_ = await detections.fetch_all( filters=det_filters, - inequality_pair=("created_at", ">", datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_MIN_INTERVAL_SECONDS)), + inequality_pair=( + "created_at", + ">", + datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_MIN_INTERVAL_SECONDS), + ), order_by="created_at", order_desc=False, limit=settings.SEQUENCE_MIN_INTERVAL_DETS, From 2778efbdf8e29f910e8ded1b3210441ed4ca7f74 Mon Sep 17 00:00:00 2001 From: Mateo Date: Sun, 28 Dec 2025 11:14:32 +0100 Subject: [PATCH 49/55] adapt test --- src/tests/endpoints/test_detections.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py index a59b30aa..19df1f1c 100644 --- a/src/tests/endpoints/test_detections.py +++ b/src/tests/endpoints/test_detections.py @@ -22,8 +22,8 @@ (2, None, {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.6)]"}, 403, "Incompatible token scope.", None), (None, 0, {}, 422, None, None), (None, 0, {"pose_id": 3, "bboxes": []}, 422, None, None), - (None, 1, {"pose_id": 3, "bboxes": (0.6, 0.6, 0.6, 0.6, 0.6)}, 422, None, None), - (None, 1, {"pose_id": 3, "bboxes": "[(0.6, 0.6, 0.6, 0.6, 0.6)]"}, 422, None, None), + (None, 1, {"pose_id": 1, "bboxes": (0.6, 0.6, 0.6, 0.6, 0.6)}, 422, None, None), + (None, 1, {"pose_id": 1, "bboxes": "[(0.6, 0.6, 0.6, 0.6, 0.6)]"}, 422, None, None), ( None, 1, From 7ea95b6ccce929c83c1918233c4a910bc2449330 Mon Sep 17 00:00:00 2001 From: Mateo Date: Sun, 28 Dec 2025 12:56:08 +0100 Subject: [PATCH 50/55] split seq based on bbox --- src/app/api/api_v1/endpoints/sequences.py | 2 +- src/app/models.py | 3 ++- src/app/schemas/detections.py | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index f8fa5b7a..ab4a259d 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -53,7 +53,7 @@ async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: Ale new_start = min(seq.started_at for seq in seqs) new_last = max(seq.last_seen_at for seq in seqs) - loc: Union[tuple[float, float], None] = None + loc: tuple[float, float] | None = None if len(rows) >= 2: records = [] for seq, cam in zip(seqs, cams, strict=False): diff --git a/src/app/models.py b/src/app/models.py index 02d187a5..68799436 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ -71,10 +71,11 @@ class Detection(SQLModel, table=True): __tablename__ = "detections" id: int = Field(None, primary_key=True) camera_id: int = Field(..., foreign_key="cameras.id", nullable=False) - pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True) + pose_id: int = Field(..., foreign_key="poses.id", nullable=False) sequence_id: Union[int, None] = Field(None, foreign_key="sequences.id", nullable=True) bucket_key: str bboxes: str = Field(..., min_length=2, max_length=settings.MAX_BBOX_STR_LENGTH, nullable=False) + others_bboxes: Union[str, None] = Field(default=None, nullable=True) created_at: datetime = 
Field(default_factory=datetime.utcnow, nullable=False)
diff --git a/src/app/schemas/detections.py b/src/app/schemas/detections.py
index d6fb5a54..501a6dcd 100644
--- a/src/app/schemas/detections.py
+++ b/src/app/schemas/detections.py
@@ -27,7 +27,7 @@ class DetectionLabel(BaseModel):

 class DetectionCreate(BaseModel):
     camera_id: int = Field(..., gt=0)
-    pose_id: Optional[int] = Field(None, gt=0)
+    pose_id: int = Field(..., gt=0)
     bucket_key: str
     bboxes: str = Field(
         ...,
@@ -36,6 +36,7 @@ class DetectionCreate(BaseModel):
         description="string representation of list of tuples where each tuple is a relative coordinate in order xmin, ymin, xmax, ymax, conf",
         json_schema_extra={"examples": ["[(0.1, 0.1, 0.9, 0.9, 0.5)]"]},
     )
+    others_bboxes: Optional[str] = Field(None)


 class DetectionUrl(BaseModel):
From 1f526810200e6eae3c8af34ee879f31a0a11fd56 Mon Sep 17 00:00:00 2001
From: Mateo
Date: Sun, 28 Dec 2025 12:56:30 +0100
Subject: [PATCH 51/55] adapt test

---
 src/tests/conftest.py                  |  4 ++
 src/tests/endpoints/test_detections.py | 67 +++++++++++++++++++++++++-
 src/tests/endpoints/test_sequences.py  |  3 +-
 3 files changed, 71 insertions(+), 3 deletions(-)

diff --git a/src/tests/conftest.py b/src/tests/conftest.py
index b3b781f4..347cb97d 100644
--- a/src/tests/conftest.py
+++ b/src/tests/conftest.py
@@ -124,6 +124,7 @@
         "sequence_id": 1,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
+        "others_bboxes": None,
         "created_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format),
     },
     {
@@ -133,6 +134,7 @@
         "sequence_id": 1,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
+        "others_bboxes": None,
         "created_at": datetime.strptime("2023-11-07T15:18:19.226673", dt_format),
     },
     {
@@ -142,6 +144,7 @@
         "sequence_id": 1,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
+        "others_bboxes": None,
         "created_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format),
     },
     {
@@ -151,6 +154,7 @@
         "sequence_id": 2,
         "bucket_key": "my_file",
         "bboxes": "[(.1,.1,.7,.8,.9)]",
+        "others_bboxes": None,
         "created_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format),
     },
 ]
diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py
index 19df1f1c..b7d40773 100644
--- a/src/tests/endpoints/test_detections.py
+++ b/src/tests/endpoints/test_detections.py
@@ -1,3 +1,4 @@
+from ast import literal_eval
 from datetime import datetime, timedelta
 from typing import Any, Dict, List, Union

@@ -41,6 +42,15 @@
             None,
             2,
         ),
+        # multiple bboxes produce multiple detections
+        (
+            None,
+            1,
+            {"pose_id": 3, "bboxes": "[(0.6,0.6,0.7,0.7,0.6),(0.2,0.2,0.3,0.3,0.8)]"},
+            201,
+            None,
+            0,
+        ),
     ],
 )
 @pytest.mark.asyncio
@@ -78,9 +88,18 @@ async def test_create_detection(
     if response.status_code // 100 == 2:
         data = response.json()
         assert data["pose_id"] == payload.get("pose_id")
-        assert data["bboxes"] == payload["bboxes"]
+        if isinstance(payload.get("bboxes"), str):
+            boxes = literal_eval(payload["bboxes"])
+            if len(boxes) > 1:
+                assert literal_eval(data["bboxes"])[0] == tuple(boxes[0])
+                assert data["others_bboxes"] is not None
+            else:
+                assert data["bboxes"] == payload["bboxes"]
+                assert data.get("others_bboxes") is None
         assert data["id"] == max(entry["id"] for entry in pytest.detection_table) + 1
         assert data["camera_id"] == pytest.camera_table[cam_idx]["id"]
+    created_ids: List[int] = []
+    if response.status_code // 100 == 2:
+        created_ids.append(response.json()["id"])
     if isinstance(repeat, int) and repeat > 0:
         det_ids = [response.json()["id"]]
         for _ in range(repeat):
@@ -104,6 
+123,52 @@ async def test_create_detection( ) assert response.status_code == 200 assert response.json()["sequence_id"] == sequence_id + created_ids.extend(det_ids) + + # Multi-bbox input should create multiple detections + if response.status_code == 201 and isinstance(payload.get("bboxes"), str) and repeat in (0, None): + boxes = literal_eval(payload["bboxes"]) + if len(boxes) <= 1: + return + bucket_key = response.json()["bucket_key"] + latest_res = await detection_session.exec( + select(Detection).where(Detection.bucket_key == bucket_key).order_by(Detection.id.desc()).limit(len(boxes)) # type: ignore[attr-defined] + ) + dets = latest_res.all() + assert len(dets) == len(boxes) + assert all(det.bucket_key == bucket_key for det in dets) + assert all(det.pose_id == payload["pose_id"] for det in dets) + assert any(det.others_bboxes is not None for det in dets) + + +@pytest.mark.asyncio +async def test_create_detection_creates_new_sequence_on_bbox_split( + async_client: AsyncClient, + detection_session: AsyncSession, + mock_img: bytes, +): + auth = pytest.get_token( + pytest.camera_table[0]["id"], + ["camera"], + pytest.camera_table[0]["organization_id"], + ) + + payload1 = {"pose_id": 1, "bboxes": "[(0.1,0.1,0.2,0.2,0.9)]"} + resp1 = await async_client.post( + "/detections", data=payload1, files={"file": ("logo.png", mock_img, "image/png")}, headers=auth + ) + assert resp1.status_code == 201, resp1.text + seq_id_1 = resp1.json()["sequence_id"] + assert isinstance(seq_id_1, int) + + payload2 = {"pose_id": 1, "bboxes": "[(0.6,0.6,0.7,0.7,0.9)]"} + resp2 = await async_client.post( + "/detections", data=payload2, files={"file": ("logo.png", mock_img, "image/png")}, headers=auth + ) + assert resp2.status_code == 201, resp2.text + seq_id_2 = resp2.json()["sequence_id"] + assert isinstance(seq_id_2, int) + assert seq_id_2 != seq_id_1 @pytest.mark.parametrize( diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py index 6ba3022e..bef9801a 100644 --- a/src/tests/endpoints/test_sequences.py +++ b/src/tests/endpoints/test_sequences.py @@ -328,9 +328,8 @@ async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncC ) detection = Detection( camera_id=camera.id, - pose_id=None, + pose_id=1, sequence_id=None, - azimuth=45.0, bucket_key="tmp", bboxes="[(0.1,0.1,0.2,0.2,0.9)]", created_at=now, From b3a8b1da5e634f4e2b5289936e6ddd0fd96973ba Mon Sep 17 00:00:00 2001 From: Mateo Date: Sun, 28 Dec 2025 13:01:05 +0100 Subject: [PATCH 52/55] prevent UnboundLocalError --- src/app/api/api_v1/endpoints/detections.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index 11c6d780..b18c9e41 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -235,7 +235,6 @@ async def create_detection( order_desc=True, limit=1, ) - if len(sequence) == 1: # Add detection to existing sequence await sequences.update(sequence[0].id, SequenceUpdate(last_seen_at=det.created_at)) From 44034e3bc02cf70ba44cc4f7b0f382e0f7729d32 Mon Sep 17 00:00:00 2001 From: Mateo Date: Sun, 28 Dec 2025 13:05:11 +0100 Subject: [PATCH 53/55] fix mypy client --- client/pyroclient/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/pyroclient/client.py b/client/pyroclient/client.py index fb784a15..709f0fb4 100644 --- a/client/pyroclient/client.py +++ b/client/pyroclient/client.py @@ -235,10 +235,10 @@ def create_detection( """ if not 
isinstance(bboxes, (list, tuple)) or len(bboxes) == 0 or len(bboxes) > 5: raise ValueError("bboxes must be a non-empty list of tuples with a maximum of 5 boxes") - data = { + data: Dict[str, str] = { "bboxes": _dump_bbox_to_json(bboxes), } - data["pose_id"] = pose_id + data["pose_id"] = str(pose_id) return requests.post( urljoin(self._route_prefix, ClientRoute.DETECTIONS_CREATE), headers=self.headers, From 35eb7fa2614c001be652991fe5a9e6005ec096bf Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 29 Dec 2025 09:45:08 +0100 Subject: [PATCH 54/55] complete client --- client/pyroclient/client.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/client/pyroclient/client.py b/client/pyroclient/client.py index 709f0fb4..a564fb9a 100644 --- a/client/pyroclient/client.py +++ b/client/pyroclient/client.py @@ -176,7 +176,17 @@ def create_pose( timeout=self.timeout, ) - def patch_pose( + def get_current_poses(self, camera_id: int | None = None) -> Response: + """Fetch poses, optionally filtered by camera_id.""" + params: Dict[str, int] | None = {"camera_id": camera_id} if camera_id is not None else None + return requests.get( + urljoin(self._route_prefix, ClientRoute.POSES_CREATE), + headers=self.headers, + params=params, + timeout=self.timeout, + ) + + def update_pose( self, pose_id: int, azimuth: float | None = None, @@ -184,9 +194,9 @@ def patch_pose( ) -> Response: """Update a pose - >>> api_client.patch_pose(pose_id=1, azimuth=90.0) + >>> api_client.update_pose(pose_id=1, azimuth=90.0) """ - payload = {} + payload: Dict[str, float | int] = {} if azimuth is not None: payload["azimuth"] = azimuth if patrol_id is not None: @@ -199,6 +209,9 @@ def patch_pose( timeout=self.timeout, ) + # Backward compatibility alias + patch_pose = update_pose + def delete_pose(self, pose_id: int) -> Response: """Delete a pose From 0cd98894055189e2593223115c773c686a03ba6b Mon Sep 17 00:00:00 2001 From: Mateo Date: Mon, 29 Dec 2025 17:42:26 +0100 Subject: [PATCH 55/55] style --- client/pyroclient/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/pyroclient/client.py b/client/pyroclient/client.py index a564fb9a..e32c2873 100644 --- a/client/pyroclient/client.py +++ b/client/pyroclient/client.py @@ -176,9 +176,9 @@ def create_pose( timeout=self.timeout, ) - def get_current_poses(self, camera_id: int | None = None) -> Response: - """Fetch poses, optionally filtered by camera_id.""" - params: Dict[str, int] | None = {"camera_id": camera_id} if camera_id is not None else None + def get_current_poses(self) -> Response: + """Fetch poses for the authenticated camera.""" + params: Dict[str, int] | None = None return requests.get( urljoin(self._route_prefix, ClientRoute.POSES_CREATE), headers=self.headers,
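
Taken together, this series keys sequences on (camera_id, pose_id) instead of a per-detection azimuth, and computes the view cone from pose.azimuth and the camera's angle_of_view when the sequence is created. The endpoint's grouping rule reduces to two time windows; the sketch below restates it in plain Python, with placeholder constants standing in for the settings.SEQUENCE_* values (their real defaults are not part of this excerpt):

    from datetime import datetime, timedelta
    from typing import List, Optional

    # Placeholder values; the actual settings.SEQUENCE_* defaults are not shown here
    SEQUENCE_RELAXATION_SECONDS = 600
    SEQUENCE_MIN_INTERVAL_SECONDS = 300
    SEQUENCE_MIN_INTERVAL_DETS = 3

    def attach_or_create(last_seen_at: Optional[datetime], det_times: List[datetime], now: datetime) -> str:
        # A detection joins the latest (camera_id, pose_id) sequence if that
        # sequence was seen within the relaxation window
        if last_seen_at is not None and now - last_seen_at < timedelta(seconds=SEQUENCE_RELAXATION_SECONDS):
            return "attach"
        # Otherwise a new sequence is opened once enough detections for the same
        # (camera_id, pose_id) landed inside the interval window; the endpoint then
        # derives its cone via resolve_cone(pose.azimuth, first_bboxes, camera.angle_of_view)
        in_window = [t for t in det_times if now - t < timedelta(seconds=SEQUENCE_MIN_INTERVAL_SECONDS)]
        return "create" if len(in_window) >= SEQUENCE_MIN_INTERVAL_DETS else "none"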
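Patches 50 and 51 additionally split a multi-box payload into one stored detection per box, with each row's companions mirrored in others_bboxes. The splitting code itself is not part of this excerpt; the tests only pin down the contract (same bucket_key and pose_id on every row, the primary box kept in bboxes, others_bboxes non-null when companions exist). A hypothetical splitter consistent with those assertions, assuming others_bboxes reuses the same string serialization as bboxes:

    from ast import literal_eval
    from typing import List, Optional, Tuple

    Box = Tuple[float, float, float, float, float]

    def split_bboxes(bboxes_str: str) -> List[Tuple[str, Optional[str]]]:
        # One (bboxes, others_bboxes) pair per input box; a single-box payload
        # keeps others_bboxes as None, matching the updated conftest fixtures
        boxes: List[Box] = list(literal_eval(bboxes_str))
        pairs: List[Tuple[str, Optional[str]]] = []
        for i, box in enumerate(boxes):
            others = boxes[:i] + boxes[i + 1 :]
            pairs.append((str([box]), str(others) if others else None))
        return pairs

For example, split_bboxes("[(0.6,0.6,0.7,0.7,0.6),(0.2,0.2,0.3,0.3,0.8)]") yields two pairs, matching the test that expects len(dets) == len(boxes).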
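On the client side, patches 53 through 55 coerce pose_id to a string (multipart form fields travel as text) and settle the pose helpers, keeping patch_pose as a backward-compatible alias of update_pose. A minimal end-to-end sketch of the resulting flow, assuming a reachable API, a valid camera token, and that the poses listing returns JSON objects carrying an "id" field (none of these is pinned down in this excerpt):

    from pyroclient import Client

    api_client = Client("MY_CAM_TOKEN")  # placeholder token

    # Pick a pose for the authenticated camera; the response shape is an assumption here
    pose_id = api_client.get_current_poses().json()[0]["id"]

    # Optionally re-point the camera before reporting
    api_client.update_pose(pose_id=pose_id, azimuth=90.0)

    with open("path/to/my/file.ext", "rb") as f:
        data = f.read()

    # No azimuth argument anymore: the server derives the cone from the pose
    # azimuth and the camera's angle of view once a sequence is created
    response = api_client.create_detection(data, bboxes=[(0.1, 0.1, 0.5, 0.8, 0.5)], pose_id=pose_id)
    assert response.status_code == 201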