diff --git a/poetry.lock b/poetry.lock index 3ab1cc95..d052e730 100644 --- a/poetry.lock +++ b/poetry.lock @@ -199,18 +199,18 @@ tests = ["pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "boto3" -version = "1.42.13" +version = "1.42.14" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.42.13-py3-none-any.whl", hash = "sha256:9d6aad3fa8b90567006bf7b32efa26489fc306fbe63946eaf57b72356a45761d"}, - {file = "boto3-1.42.13.tar.gz", hash = "sha256:4c9a62dcb5c3f905630fe99fb4b81131da84c5c92eedcc81a89cbd924c1c524f"}, + {file = "boto3-1.42.14-py3-none-any.whl", hash = "sha256:bfcc665227bb4432a235cb4adb47719438d6472e5ccbf7f09512046c3f749670"}, + {file = "boto3-1.42.14.tar.gz", hash = "sha256:a5d005667b480c844ed3f814a59f199ce249d0f5669532a17d06200c0a93119c"}, ] [package.dependencies] -botocore = ">=1.42.13,<1.43.0" +botocore = ">=1.42.14,<1.43.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.16.0,<0.17.0" @@ -219,14 +219,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.42.13" +version = "1.42.14" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.42.13-py3-none-any.whl", hash = "sha256:b750b2de4a2478db9718a02395cb9da8698901ba02378d60037d6369ecb6bb88"}, - {file = "botocore-1.42.13.tar.gz", hash = "sha256:7e4cf14bd5719b60600fb45d2bb3ae140feb3c182a863b93093aafce7f93cfee"}, + {file = "botocore-1.42.14-py3-none-any.whl", hash = "sha256:efe89adfafa00101390ec2c371d453b3359d5f9690261bc3bd70131e0d453e8e"}, + {file = "botocore-1.42.14.tar.gz", hash = "sha256:cf5bebb580803c6cfd9886902ca24834b42ecaa808da14fb8cd35ad523c9f621"}, ] [package.dependencies] @@ -640,26 +640,26 @@ files = [ [[package]] name = "fastapi" -version = "0.125.0" +version = "0.127.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fastapi-0.125.0-py3-none-any.whl", hash = "sha256:2570ec4f3aecf5cca8f0428aed2398b774fcdfee6c2116f86e80513f2f86a7a1"}, - {file = "fastapi-0.125.0.tar.gz", hash = "sha256:16b532691a33e2c5dee1dac32feb31dc6eb41a3dd4ff29a95f9487cb21c054c0"}, + {file = "fastapi-0.127.0-py3-none-any.whl", hash = "sha256:725aa2bb904e2eff8031557cf4b9b77459bfedd63cae8427634744fd199f6a49"}, + {file = "fastapi-0.127.0.tar.gz", hash = "sha256:5a9246e03dcd1fdb19f1396db30894867c1d630f5107dc167dcbc5ed1ea7d259"}, ] [package.dependencies] annotated-doc = ">=0.0.2" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +pydantic = ">=2.7.0" starlette = ">=0.40.0,<0.51.0" typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] -standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart 
(>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "filelock" @@ -673,6 +673,42 @@ files = [ {file = "filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c"}, ] +[[package]] +name = "geographiclib" +version = "2.1" +description = "The geodesic routines from GeographicLib" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "geographiclib-2.1-py3-none-any.whl", hash = "sha256:e2a873b9b9e7fc38721ad73d5f4e6c9ed140d428a339970f505c07056997d40b"}, + {file = "geographiclib-2.1.tar.gz", hash = "sha256:6a6545e6262d0ed3522e13c515713718797e37ed8c672c31ad7b249f372ef108"}, +] + +[[package]] +name = "geopy" +version = "2.4.1" +description = "Python Geocoding Toolbox" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "geopy-2.4.1-py3-none-any.whl", hash = "sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7"}, + {file = "geopy-2.4.1.tar.gz", hash = "sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1"}, +] + +[package.dependencies] +geographiclib = ">=1.52,<3" + +[package.extras] +aiohttp = ["aiohttp"] +dev = ["coverage", "flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-docs = ["readme-renderer", "sphinx (<=4.3.2)", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-lint = ["flake8 (>=5.0,<5.1)", "isort (>=5.10.0,<5.11.0)"] +dev-test = ["coverage", "pytest (>=3.10)", "pytest-asyncio (>=0.17)", "sphinx (<=4.3.2)"] +requests = ["requests (>=2.16.2)", "urllib3 (>=1.24.2)"] +timezone = ["pytz"] + [[package]] name = "greenlet" version = "3.3.0" @@ -1074,16 +1110,110 @@ files = [ {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] +[[package]] +name = "networkx" +version = "3.6" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.11" +groups = ["main"] +markers = "python_version >= \"3.12\"" +files = [ + {file = "networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f"}, + {file = "networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad"}, +] + +[package.extras] +benchmarking = ["asv", "virtualenv"] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "iplotx (>=0.9.0)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", 
"pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +release = ["build (>=0.10)", "changelist (==0.5)", "twine (>=4.0)", "wheel (>=0.40)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + +[[package]] +name = "networkx" +version = "3.6.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = "!=3.14.1,>=3.11" +groups = ["main"] +markers = "python_version == \"3.11\"" +files = [ + {file = "networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762"}, + {file = "networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509"}, +] + +[package.extras] +benchmarking = ["asv", "virtualenv"] +default = ["matplotlib (>=3.8)", "numpy (>=1.25)", "pandas (>=2.0)", "scipy (>=1.11.2)"] +developer = ["mypy (>=1.15)", "pre-commit (>=4.1)"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=10)", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8.0)", "sphinx-gallery (>=0.18)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "iplotx (>=0.9.0)", "momepy (>=0.7.2)", "osmnx (>=2.0.0)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +release = ["build (>=0.10)", "changelist (==0.5)", "twine (>=4.0)", "wheel (>=0.40)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"] +test-extras = ["pytest-mpl", "pytest-randomly"] + [[package]] name = "nodeenv" -version = "1.9.1" +version = "1.10.0" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["quality"] files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, + {file = "nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827"}, + {file = "nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash 
= "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = 
"sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -1098,6 +1228,105 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pandas" +version = "2.3.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c"}, + {file = "pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a"}, + {file = "pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1"}, + {file = "pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838"}, + {file = "pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250"}, + {file = "pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4"}, + {file = "pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826"}, + {file = "pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523"}, + {file = "pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45"}, + {file = "pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66"}, + {file = "pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b"}, + {file = "pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791"}, + {file = "pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151"}, + {file = "pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c"}, + {file = "pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53"}, + {file = "pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35"}, + {file = "pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908"}, + {file = "pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89"}, + {file = "pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98"}, + {file = "pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084"}, + {file = "pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b"}, + {file = "pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713"}, + {file = "pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8"}, + {file = "pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d"}, + {file = "pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac"}, + {file = "pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c"}, + {file = "pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493"}, + {file = "pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee"}, + {file = "pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5"}, + {file = "pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21"}, + {file = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78"}, + {file = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110"}, + {file = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86"}, + {file = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc"}, + {file = "pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0"}, + {file = "pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593"}, + {file = "pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c"}, + {file = "pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b"}, + {file = "pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6"}, + {file = "pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3"}, + {file = "pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5"}, + {file = "pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec"}, + {file = "pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7"}, + {file = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450"}, + {file = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5"}, + {file = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788"}, + {file = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87"}, + {file = "pandas-2.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c503ba5216814e295f40711470446bc3fd00f0faea8a086cbc688808e26f92a2"}, + {file = "pandas-2.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a637c5cdfa04b6d6e2ecedcb81fc52ffb0fd78ce2ebccc9ea964df9f658de8c8"}, + {file = "pandas-2.3.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:854d00d556406bffe66a4c0802f334c9ad5a96b4f1f868adf036a21b11ef13ff"}, + {file = "pandas-2.3.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bf1f8a81d04ca90e32a0aceb819d34dbd378a98bf923b6398b9a3ec0bf44de29"}, + {file = "pandas-2.3.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23ebd657a4d38268c7dfbdf089fbc31ea709d82e4923c5ffd4fbd5747133ce73"}, + {file = "pandas-2.3.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5554c929ccc317d41a5e3d1234f3be588248e61f08a74dd17c9eabb535777dc9"}, + {file = "pandas-2.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:d3e28b3e83862ccf4d85ff19cf8c20b2ae7e503881711ff2d534dc8f761131aa"}, + {file = "pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", 
"zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "passlib" version = "1.7.4" @@ -1454,6 +1683,74 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pyproj" +version = "3.7.2" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +optional = false +python-versions = ">=3.11" +groups = ["main"] +files = [ + {file = "pyproj-3.7.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2514d61f24c4e0bb9913e2c51487ecdaeca5f8748d8313c933693416ca41d4d5"}, + {file = "pyproj-3.7.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:8693ca3892d82e70de077701ee76dd13d7bca4ae1c9d1e739d72004df015923a"}, + {file = "pyproj-3.7.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5e26484d80fea56273ed1555abaea161e9661d81a6c07815d54b8e883d4ceb25"}, + {file = "pyproj-3.7.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:281cb92847814e8018010c48b4069ff858a30236638631c1a91dd7bfa68f8a8a"}, + {file = "pyproj-3.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9c8577f0b7bb09118ec2e57e3babdc977127dd66326d6c5d755c76b063e6d9dc"}, + {file = "pyproj-3.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a23f59904fac3a5e7364b3aa44d288234af267ca041adb2c2b14a903cd5d3ac5"}, + {file = "pyproj-3.7.2-cp311-cp311-win32.whl", hash = "sha256:f2af4ed34b2cf3e031a2d85b067a3ecbd38df073c567e04b52fa7a0202afde8a"}, + {file = "pyproj-3.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:0b7cb633565129677b2a183c4d807c727d1c736fcb0568a12299383056e67433"}, + {file = "pyproj-3.7.2-cp311-cp311-win_arm64.whl", hash = "sha256:38b08d85e3a38e455625b80e9eb9f78027c8e2649a21dec4df1f9c3525460c71"}, + {file = "pyproj-3.7.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:0a9bb26a6356fb5b033433a6d1b4542158fb71e3c51de49b4c318a1dff3aeaab"}, + {file = "pyproj-3.7.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:567caa03021178861fad27fabde87500ec6d2ee173dd32f3e2d9871e40eebd68"}, + {file = "pyproj-3.7.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c203101d1dc3c038a56cff0447acc515dd29d6e14811406ac539c21eed422b2a"}, + {file = 
"pyproj-3.7.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1edc34266c0c23ced85f95a1ee8b47c9035eae6aca5b6b340327250e8e281630"}, + {file = "pyproj-3.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa9f26c21bc0e2dc3d224cb1eb4020cf23e76af179a7c66fea49b828611e4260"}, + {file = "pyproj-3.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9428b318530625cb389b9ddc9c51251e172808a4af79b82809376daaeabe5e9"}, + {file = "pyproj-3.7.2-cp312-cp312-win32.whl", hash = "sha256:b3d99ed57d319da042f175f4554fc7038aa4bcecc4ac89e217e350346b742c9d"}, + {file = "pyproj-3.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:11614a054cd86a2ed968a657d00987a86eeb91fdcbd9ad3310478685dc14a128"}, + {file = "pyproj-3.7.2-cp312-cp312-win_arm64.whl", hash = "sha256:509a146d1398bafe4f53273398c3bb0b4732535065fa995270e52a9d3676bca3"}, + {file = "pyproj-3.7.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:19466e529b1b15eeefdf8ff26b06fa745856c044f2f77bf0edbae94078c1dfa1"}, + {file = "pyproj-3.7.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:c79b9b84c4a626c5dc324c0d666be0bfcebd99f7538d66e8898c2444221b3da7"}, + {file = "pyproj-3.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ceecf374cacca317bc09e165db38ac548ee3cad07c3609442bd70311c59c21aa"}, + {file = "pyproj-3.7.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5141a538ffdbe4bfd157421828bb2e07123a90a7a2d6f30fa1462abcfb5ce681"}, + {file = "pyproj-3.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f000841e98ea99acbb7b8ca168d67773b0191de95187228a16110245c5d954d5"}, + {file = "pyproj-3.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8115faf2597f281a42ab608ceac346b4eb1383d3b45ab474fd37341c4bf82a67"}, + {file = "pyproj-3.7.2-cp313-cp313-win32.whl", hash = "sha256:f18c0579dd6be00b970cb1a6719197fceecc407515bab37da0066f0184aafdf3"}, + {file = "pyproj-3.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:bb41c29d5f60854b1075853fe80c58950b398d4ebb404eb532536ac8d2834ed7"}, + {file = "pyproj-3.7.2-cp313-cp313-win_arm64.whl", hash = "sha256:2b617d573be4118c11cd96b8891a0b7f65778fa7733ed8ecdb297a447d439100"}, + {file = "pyproj-3.7.2-cp313-cp313t-macosx_13_0_x86_64.whl", hash = "sha256:d27b48f0e81beeaa2b4d60c516c3a1cfbb0c7ff6ef71256d8e9c07792f735279"}, + {file = "pyproj-3.7.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:55a3610d75023c7b1c6e583e48ef8f62918e85a2ae81300569d9f104d6684bb6"}, + {file = "pyproj-3.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8d7349182fa622696787cc9e195508d2a41a64765da9b8a6bee846702b9e6220"}, + {file = "pyproj-3.7.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:d230b186eb876ed4f29a7c5ee310144c3a0e44e89e55f65fb3607e13f6db337c"}, + {file = "pyproj-3.7.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:237499c7862c578d0369e2b8ac56eec550e391a025ff70e2af8417139dabb41c"}, + {file = "pyproj-3.7.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8c225f5978abd506fd9a78eaaf794435e823c9156091cabaab5374efb29d7f69"}, + {file = "pyproj-3.7.2-cp313-cp313t-win32.whl", hash = "sha256:2da731876d27639ff9d2d81c151f6ab90a1546455fabd93368e753047be344a2"}, + {file = "pyproj-3.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f54d91ae18dd23b6c0ab48126d446820e725419da10617d86a1b69ada6d881d3"}, + {file = "pyproj-3.7.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fc52ba896cfc3214dc9f9ca3c0677a623e8fdd096b257c14a31e719d21ff3fdd"}, + {file = "pyproj-3.7.2-cp314-cp314-macosx_13_0_x86_64.whl", hash = "sha256:2aaa328605ace41db050d06bac1adc11f01b71fe95c18661497763116c3a0f02"}, + {file = 
"pyproj-3.7.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:35dccbce8201313c596a970fde90e33605248b66272595c061b511c8100ccc08"}, + {file = "pyproj-3.7.2-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:25b0b7cb0042444c29a164b993c45c1b8013d6c48baa61dc1160d834a277e83b"}, + {file = "pyproj-3.7.2-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:85def3a6388e9ba51f964619aa002a9d2098e77c6454ff47773bb68871024281"}, + {file = "pyproj-3.7.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b1bccefec3875ab81eabf49059e2b2ea77362c178b66fd3528c3e4df242f1516"}, + {file = "pyproj-3.7.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d5371ca114d6990b675247355a801925814eca53e6c4b2f1b5c0a956336ee36e"}, + {file = "pyproj-3.7.2-cp314-cp314-win32.whl", hash = "sha256:77f066626030f41be543274f5ac79f2a511fe89860ecd0914f22131b40a0ec25"}, + {file = "pyproj-3.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:5a964da1696b8522806f4276ab04ccfff8f9eb95133a92a25900697609d40112"}, + {file = "pyproj-3.7.2-cp314-cp314-win_arm64.whl", hash = "sha256:e258ab4dbd3cf627809067c0ba8f9884ea76c8e5999d039fb37a1619c6c3e1f6"}, + {file = "pyproj-3.7.2-cp314-cp314t-macosx_13_0_x86_64.whl", hash = "sha256:bbbac2f930c6d266f70ec75df35ef851d96fdb3701c674f42fd23a9314573b37"}, + {file = "pyproj-3.7.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:b7544e0a3d6339dc9151e9c8f3ea62a936ab7cc446a806ec448bbe86aebb979b"}, + {file = "pyproj-3.7.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:f7f5133dca4c703e8acadf6f30bc567d39a42c6af321e7f81975c2518f3ed357"}, + {file = "pyproj-3.7.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5aff3343038d7426aa5076f07feb88065f50e0502d1b0d7c22ddfdd2c75a3f81"}, + {file = "pyproj-3.7.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b0552178c61f2ac1c820d087e8ba6e62b29442debddbb09d51c4bf8acc84d888"}, + {file = "pyproj-3.7.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:47d87db2d2c436c5fd0409b34d70bb6cdb875cca2ebe7a9d1c442367b0ab8d59"}, + {file = "pyproj-3.7.2-cp314-cp314t-win32.whl", hash = "sha256:c9b6f1d8ad3e80a0ee0903a778b6ece7dca1d1d40f6d114ae01bc8ddbad971aa"}, + {file = "pyproj-3.7.2-cp314-cp314t-win_amd64.whl", hash = "sha256:1914e29e27933ba6f9822663ee0600f169014a2859f851c054c88cf5ea8a333c"}, + {file = "pyproj-3.7.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d9d25bae416a24397e0d85739f84d323b55f6511e45a522dd7d7eae70d10c7e4"}, + {file = "pyproj-3.7.2.tar.gz", hash = "sha256:39a0cf1ecc7e282d1d30f36594ebd55c9fae1fda8a2622cee5d100430628f88c"}, +] + +[package.dependencies] +certifi = "*" + [[package]] name = "pytest" version = "8.3.5" @@ -1586,6 +1883,18 @@ files = [ [package.extras] dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +[[package]] +name = "pytz" +version = "2025.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1821,6 +2130,80 @@ statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] unleash = ["UnleashClient (>=6.0.1)"] 
+[[package]] +name = "shapely" +version = "2.1.2" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "shapely-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ae48c236c0324b4e139bea88a306a04ca630f49be66741b340729d380d8f52f"}, + {file = "shapely-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eba6710407f1daa8e7602c347dfc94adc02205ec27ed956346190d66579eb9ea"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ef4a456cc8b7b3d50ccec29642aa4aeda959e9da2fe9540a92754770d5f0cf1f"}, + {file = "shapely-2.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e38a190442aacc67ff9f75ce60aec04893041f16f97d242209106d502486a142"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:40d784101f5d06a1fd30b55fc11ea58a61be23f930d934d86f19a180909908a4"}, + {file = "shapely-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f6cd5819c50d9bcf921882784586aab34a4bd53e7553e175dece6db513a6f0"}, + {file = "shapely-2.1.2-cp310-cp310-win32.whl", hash = "sha256:fe9627c39c59e553c90f5bc3128252cb85dc3b3be8189710666d2f8bc3a5503e"}, + {file = "shapely-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:1d0bfb4b8f661b3b4ec3565fa36c340bfb1cda82087199711f86a88647d26b2f"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618"}, + {file = "shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09"}, + {file = "shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7"}, + {file = "shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2"}, + {file = "shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6"}, + {file = "shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94"}, + {file = "shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3"}, + {file = "shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc"}, + {file = "shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d"}, + {file = "shapely-2.1.2-cp312-cp312-win32.whl", hash = 
"sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454"}, + {file = "shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8"}, + {file = "shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e"}, + {file = "shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af"}, + {file = "shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd"}, + {file = "shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350"}, + {file = "shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40"}, + {file = "shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b"}, + {file = "shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801"}, + {file = "shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c"}, + {file = "shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99"}, + {file = "shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf"}, + {file = "shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223"}, + {file = "shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df"}, + {file = "shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf"}, + {file = "shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4"}, + {file = "shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc"}, + {file = 
"shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566"}, + {file = "shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a"}, + {file = "shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1"}, + {file = "shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26"}, + {file = "shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0"}, + {file = "shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735"}, + {file = "shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9"}, + {file = "shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9"}, +] + +[package.dependencies] +numpy = ">=1.21" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov", "scipy-doctest"] + [[package]] name = "six" version = "1.17.0" @@ -2054,6 +2437,18 @@ files = [ [package.dependencies] typing-extensions = ">=4.12.0" +[[package]] +name = "tzdata" +version = "2025.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1"}, + {file = "tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7"}, +] + [[package]] name = "urllib3" version = "2.6.2" @@ -2074,14 +2469,14 @@ zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [[package]] name = "uvicorn" -version = "0.38.0" +version = "0.40.0" description = "The lightning-fast ASGI server." 
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
 groups = ["main"]
 files = [
-    {file = "uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02"},
-    {file = "uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d"},
+    {file = "uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee"},
+    {file = "uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea"},
 ]

 [package.dependencies]
@@ -2115,4 +2510,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.11"
-content-hash = "3f7fd812d4b7a4a4b0abde381dcaafcecef23514d717879c83510f2f2507d0ba"
+content-hash = "aa48b3d2633da3dff397471f72c63e0bbce90edde9c7a9235101518e549770b6"
diff --git a/pyproject.toml b/pyproject.toml
index 9d799bca..8dbc7e61 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,6 +29,12 @@ python-multipart = "==0.0.7"
 python-magic = "^0.4.17"
 boto3 = "^1.26.0"
 httpx = "^0.24.0"
+geopy = "^2.4.0"
+networkx = "^3.2.0"
+numpy = "^1.26.0"
+pandas = "^2.2.0"
+pyproj = "^3.6.0"
+shapely = "^2.0.0"

 [tool.poetry.group.quality]
 optional = true
@@ -154,6 +160,20 @@ check_untyped_defs = true
 implicit_reexport = false
 explicit_package_bases = true
 plugins = ["pydantic.mypy"]
+[[tool.mypy.overrides]]
+module = [
+    "pandas",
+    "pandas.*",
+    "numpy",
+    "pyproj",
+    "shapely.*",
+    "geopy.*",
+    "passlib",
+    "passlib.*",
+    "requests",
+    "requests.*",
+]
+ignore_missing_imports = true

 [[tool.mypy.overrides]]
 module = [
diff --git a/scripts/test_e2e.py b/scripts/test_e2e.py
index afdeb0cc..416332e0 100644
--- a/scripts/test_e2e.py
+++ b/scripts/test_e2e.py
@@ -149,7 +149,7 @@ def main(args):
     assert sequence["camera_id"] == cam_id
     assert sequence["started_at"] == response.json()["created_at"]
     assert sequence["last_seen_at"] > sequence["started_at"]
-    assert sequence["azimuth"] == response.json()["azimuth"]
+    assert sequence["camera_azimuth"] == response.json()["azimuth"]
     # Fetch the latest sequence
     assert len(api_request("get", f"{args.endpoint}/sequences/unlabeled/latest", agent_auth)) == 1
     # Fetch from date
diff --git a/src/app/api/api_v1/endpoints/alerts.py b/src/app/api/api_v1/endpoints/alerts.py
new file mode 100644
index 00000000..79aa81c0
--- /dev/null
+++ b/src/app/api/api_v1/endpoints/alerts.py
@@ -0,0 +1,135 @@
+# Copyright (C) 2025-2026, Pyronear.
+
+# This program is licensed under the Apache License 2.0.
+# See LICENSE or go to <https://opensource.org/licenses/Apache-2.0> for full license details.
+ + +from datetime import date, datetime, timedelta +from typing import Any, List, Union, cast + +from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status +from sqlalchemy import asc, desc +from sqlmodel import delete, func, select +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.api.dependencies import get_alert_crud, get_jwt +from app.crud import AlertCRUD +from app.db import get_session +from app.models import Alert, AlertSequence, Sequence, UserRole +from app.schemas.alerts import AlertRead +from app.schemas.login import TokenPayload +from app.services.telemetry import telemetry_client + +router = APIRouter() + + +def verify_org_rights(organization_id: int, alert: Alert) -> None: + if organization_id != alert.organization_id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") + + +@router.get("/{alert_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific alert") +async def get_alert( + alert_id: int = Path(..., gt=0), + alerts: AlertCRUD = Depends(get_alert_crud), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), +) -> AlertRead: + telemetry_client.capture(token_payload.sub, event="alerts-get", properties={"alert_id": alert_id}) + alert = cast(Alert, await alerts.get(alert_id, strict=True)) + + if UserRole.ADMIN not in token_payload.scopes: + verify_org_rights(token_payload.organization_id, alert) + + return AlertRead(**alert.model_dump()) + + +@router.get( + "/{alert_id}/sequences", status_code=status.HTTP_200_OK, summary="Fetch the sequences associated to an alert" +) +async def fetch_alert_sequences( + alert_id: int = Path(..., gt=0), + limit: int = Query(10, description="Maximum number of sequences to fetch", ge=1, le=100), + order_desc: bool = Query(True, description="Whether to order the sequences by last_seen_at in descending order"), + alerts: AlertCRUD = Depends(get_alert_crud), + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), +) -> List[Sequence]: + telemetry_client.capture(token_payload.sub, event="alerts-sequences-get", properties={"alert_id": alert_id}) + alert = cast(Alert, await alerts.get(alert_id, strict=True)) + if UserRole.ADMIN not in token_payload.scopes: + verify_org_rights(token_payload.organization_id, alert) + + order_clause: Any = desc(cast(Any, Sequence.last_seen_at)) if order_desc else asc(cast(Any, Sequence.last_seen_at)) + + seq_stmt: Any = select(Sequence).join(AlertSequence, cast(Any, AlertSequence.sequence_id == Sequence.id)) + seq_stmt = seq_stmt.where(AlertSequence.alert_id == alert_id).order_by(order_clause).limit(limit) + + res = await session.exec(seq_stmt) + return list(res.all()) + + +@router.get( + "/unlabeled/latest", + status_code=status.HTTP_200_OK, + summary="Fetch all the alerts with unlabeled sequences from the last 24 hours", +) +async def fetch_latest_unlabeled_alerts( + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), +) -> List[AlertRead]: + telemetry_client.capture(token_payload.sub, event="alerts-fetch-latest") + + alerts_stmt: Any = select(Alert).join(AlertSequence, cast(Any, AlertSequence.alert_id == Alert.id)) + alerts_stmt = alerts_stmt.join(Sequence, cast(Any, Sequence.id == AlertSequence.sequence_id)) + alerts_stmt = ( + 
alerts_stmt.where(Alert.organization_id == token_payload.organization_id) + .where(Sequence.last_seen_at > datetime.utcnow() - timedelta(hours=24)) + .where(Sequence.is_wildfire.is_(None)) # type: ignore[union-attr] + .order_by(Alert.started_at.desc()) # type: ignore[attr-defined] + .limit(15) + ) + alerts_res = await session.exec(alerts_stmt) + return [AlertRead(**a.model_dump()) for a in alerts_res.unique().all()] # unique to deduplicate joins + + +@router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the alerts for a specific date") +async def fetch_alerts_from_date( + from_date: date = Query(), + limit: Union[int, None] = Query(15, description="Maximum number of alerts to fetch"), + offset: Union[int, None] = Query(0, description="Number of alerts to skip before starting to fetch"), + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), +) -> List[AlertRead]: + telemetry_client.capture(token_payload.sub, event="alerts-fetch-from-date") + + alerts_stmt: Any = ( + select(Alert) + .where(Alert.organization_id == token_payload.organization_id) + .where(func.date(Alert.started_at) == from_date) + .order_by(Alert.started_at.desc()) # type: ignore[attr-defined] + .limit(limit) + .offset(offset) + ) + alerts_res = await session.exec(alerts_stmt) + return [AlertRead(**a.model_dump()) for a in alerts_res.all()] + + +@router.delete("/{alert_id}", status_code=status.HTTP_200_OK, summary="Delete an alert") +async def delete_alert( + alert_id: int = Path(..., gt=0), + alerts: AlertCRUD = Depends(get_alert_crud), + session: AsyncSession = Depends(get_session), + token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]), +) -> None: + telemetry_client.capture(token_payload.sub, event="alert-deletion", properties={"alert_id": alert_id}) + + # Ensure alert exists and org is valid + alert = cast(Alert, await alerts.get(alert_id, strict=True)) + verify_org_rights(token_payload.organization_id, alert) + + # Delete associations + delete_stmt: Any = delete(AlertSequence).where(AlertSequence.alert_id == cast(Any, alert_id)) + await session.exec(delete_stmt) + await session.commit() + # Delete alert + await alerts.delete(alert_id) diff --git a/src/app/api/api_v1/endpoints/detections.py b/src/app/api/api_v1/endpoints/detections.py index 337903ec..1053d507 100644 --- a/src/app/api/api_v1/endpoints/detections.py +++ b/src/app/api/api_v1/endpoints/detections.py @@ -5,8 +5,9 @@ from datetime import datetime, timedelta -from typing import List, Optional, cast +from typing import Any, List, Optional, cast +import pandas as pd from fastapi import ( APIRouter, BackgroundTasks, @@ -19,9 +20,11 @@ UploadFile, status, ) +from sqlmodel import select from app.api.dependencies import ( dispatch_webhook, + get_alert_crud, get_camera_crud, get_detection_crud, get_jwt, @@ -30,17 +33,21 @@ get_webhook_crud, ) from app.core.config import settings -from app.crud import CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD -from app.models import Camera, Detection, Organization, Role, Sequence, UserRole +from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, WebhookCRUD +from app.models import Alert, AlertSequence, Camera, Detection, Organization, Role, Sequence, UserRole +from app.schemas.alerts import AlertCreate, AlertUpdate from app.schemas.detections import ( BOXES_PATTERN, COMPILED_BOXES_PATTERN, DetectionCreate, + DetectionRead, 
DetectionSequence, DetectionUrl, ) from app.schemas.login import TokenPayload from app.schemas.sequences import SequenceUpdate +from app.services.cones import resolve_cone +from app.services.overlap import compute_overlap from app.services.slack import slack_client from app.services.storage import s3_service, upload_file from app.services.telegram import telegram_client @@ -49,6 +56,128 @@ router = APIRouter() +async def _attach_sequence_to_alert( + sequence_: Sequence, + camera: Camera, + cameras: CameraCRUD, + sequences: SequenceCRUD, + alerts: AlertCRUD, +) -> None: + """Assign the given sequence to an alert based on cone/time overlap.""" + org_cameras = await cameras.fetch_all(filters=("organization_id", camera.organization_id)) + camera_by_id = {cam.id: cam for cam in org_cameras} + + if sequence_.camera_id not in camera_by_id: + camera_by_id[sequence_.camera_id] = camera + + # Fetch recent sequences for the organization based on recency of last_seen_at + recent_sequences = await sequences.fetch_all( + in_pair=("camera_id", list(camera_by_id.keys())), + inequality_pair=( + "last_seen_at", + ">", + datetime.utcnow() - timedelta(seconds=settings.SEQUENCE_RELAXATION_SECONDS), + ), + ) + + # Ensure the newly created sequence is present + if all(seq.id != sequence_.id for seq in recent_sequences): + recent_sequences.append(sequence_) + + # Build DataFrame for overlap computation + records = [] + for seq in recent_sequences: + cam = camera_by_id.get(seq.camera_id) + if cam is None or seq.sequence_azimuth is None or seq.cone_angle is None: + continue + records.append({ + "id": int(seq.id), + "lat": float(cam.lat), + "lon": float(cam.lon), + "sequence_azimuth": float(seq.sequence_azimuth), + "cone_angle": float(seq.cone_angle), + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + }) + + if not records: + return + + df = compute_overlap(pd.DataFrame.from_records(records)) + row = df[df["id"] == int(sequence_.id)] + if row.empty: + return + groups = row.iloc[0]["event_groups"] + locations = row.iloc[0].get("event_smoke_locations", []) + group_locations = {tuple(g): locations[idx] if idx < len(locations) else None for idx, g in enumerate(groups)} + + seq_by_id = {seq.id: seq for seq in recent_sequences} + seq_ids = list(seq_by_id.keys()) + + # Existing alert links + session = sequences.session + mapping: dict[int, set[int]] = {} + if seq_ids: + stmt: Any = select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + AlertSequence.sequence_id.in_(seq_ids) # type: ignore[attr-defined] + ) + res = await session.exec(stmt) + for aid, sid in res: + mapping.setdefault(int(sid), set()).add(int(aid)) + + to_link: List[AlertSequence] = [] + + for g in groups: + g_tuple = tuple(g) + location = group_locations.get(g_tuple) + start_at = min(seq_by_id[int(sid)].started_at for sid in g_tuple if int(sid) in seq_by_id) + last_seen_at = max(seq_by_id[int(sid)].last_seen_at for sid in g_tuple if int(sid) in seq_by_id) + existing_alert_ids = {aid for sid in g_tuple for aid in mapping.get(int(sid), set())} + if existing_alert_ids: + target_alert_id = min(existing_alert_ids) + # If we now have a location and the alert is missing it (or start_at can be improved), update it + if isinstance(location, tuple): + current_alert = cast(Alert, await alerts.get(target_alert_id, strict=True)) + new_start_at = min(start_at, current_alert.started_at) if current_alert.started_at else start_at + new_last_seen = ( + max(last_seen_at, current_alert.last_seen_at) if 
current_alert.last_seen_at else last_seen_at + ) + if ( + current_alert.lat is None + or current_alert.lon is None + or (current_alert.started_at is None or new_start_at < current_alert.started_at) + or (current_alert.last_seen_at is None or new_last_seen > current_alert.last_seen_at) + ): + await alerts.update( + target_alert_id, + AlertUpdate( + lat=location[0], lon=location[1], started_at=new_start_at, last_seen_at=new_last_seen + ), + ) + else: + alert = await alerts.create( + AlertCreate( + organization_id=camera.organization_id, + lat=location[0] if isinstance(location, tuple) else None, + lon=location[1] if isinstance(location, tuple) else None, + started_at=start_at, + last_seen_at=last_seen_at, + ) + ) + target_alert_id = alert.id + for sid in g_tuple: + sid_int = int(sid) + if target_alert_id in mapping.get(sid_int, set()): + continue + mapping.setdefault(sid_int, set()).add(target_alert_id) + to_link.append(AlertSequence(alert_id=target_alert_id, sequence_id=sid_int)) + + if to_link: + session.add_all(to_link) + await session.commit() + + @router.post("/", status_code=status.HTTP_201_CREATED, summary="Register a new wildfire detection") async def create_detection( background_tasks: BackgroundTasks, @@ -66,6 +195,7 @@ async def create_detection( webhooks: WebhookCRUD = Depends(get_webhook_crud), organizations: OrganizationCRUD = Depends(get_organization_crud), sequences: SequenceCRUD = Depends(get_sequence_crud), + alerts: AlertCRUD = Depends(get_alert_crud), cameras: CameraCRUD = Depends(get_camera_crud), token_payload: TokenPayload = Security(get_jwt, scopes=[Role.CAMERA]), ) -> Detection: @@ -88,7 +218,7 @@ async def create_detection( # Sequence handling # Check if there is a sequence that was seen recently sequence = await sequences.fetch_all( - filters=[("camera_id", token_payload.sub), ("azimuth", det.azimuth)], + filters=[("camera_id", token_payload.sub), ("camera_azimuth", det.azimuth)], inequality_pair=( "last_seen_at", ">", @@ -118,12 +248,16 @@ async def create_detection( ) if len(dets_) >= settings.SEQUENCE_MIN_INTERVAL_DETS: + camera = cast(Camera, await cameras.get(det.camera_id, strict=True)) + cone_azimuth, cone_angle = resolve_cone(det.azimuth, dets_[0].bboxes, camera.angle_of_view) # Create new sequence sequence_ = await sequences.create( Sequence( camera_id=token_payload.sub, pose_id=pose_id, - azimuth=det.azimuth, + camera_azimuth=det.azimuth, + sequence_azimuth=cone_azimuth, + cone_angle=cone_angle, started_at=dets_[0].created_at, last_seen_at=det.created_at, ) @@ -133,6 +267,8 @@ async def create_detection( for det_ in dets_: await detections.update(det_.id, DetectionSequence(sequence_id=sequence_.id)) + await _attach_sequence_to_alert(sequence_, camera, cameras, sequences, alerts) + # Webhooks whs = await webhooks.fetch_all() if any(whs): @@ -152,13 +288,12 @@ async def create_detection( if org.slack_hook: bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(token_payload.organization_id)) url = bucket.get_public_url(det.bucket_key) - camera = cast(Camera, await cameras.get(det.camera_id, strict=True)) background_tasks.add_task( slack_client.notify, org.slack_hook, det.model_dump_json(), url, camera.name ) - return det + return DetectionRead(**det.model_dump()) @router.get("/{detection_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific detection") @@ -177,7 +312,7 @@ async def get_detection( camera = cast(Camera, await cameras.get(detection.camera_id, strict=True)) if token_payload.organization_id != 
camera.organization_id: raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") - return detection + return DetectionRead(**detection.model_dump()) @router.get("/{detection_id}/url", status_code=200) @@ -210,15 +345,18 @@ async def fetch_detections( detections: DetectionCRUD = Depends(get_detection_crud), cameras: CameraCRUD = Depends(get_camera_crud), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[Detection]: +) -> List[DetectionRead]: telemetry_client.capture(token_payload.sub, event="detections-fetch") if UserRole.ADMIN in token_payload.scopes: - return [elt for elt in await detections.fetch_all()] + return [DetectionRead(**elt.model_dump()) for elt in await detections.fetch_all()] cameras_list = await cameras.fetch_all(filters=("organization_id", token_payload.organization_id)) camera_ids = [camera.id for camera in cameras_list] - return await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id") + return [ + DetectionRead(**elt.model_dump()) + for elt in await detections.fetch_all(in_pair=("camera_id", camera_ids), order_by="id") + ] @router.delete("/{detection_id}", status_code=status.HTTP_200_OK, summary="Delete a detection") diff --git a/src/app/api/api_v1/endpoints/organizations.py b/src/app/api/api_v1/endpoints/organizations.py index 8c51a6e7..9438129b 100644 --- a/src/app/api/api_v1/endpoints/organizations.py +++ b/src/app/api/api_v1/endpoints/organizations.py @@ -4,13 +4,14 @@ # See LICENSE or go to for full license details. -from typing import List, cast +from typing import Any, List, cast from fastapi import APIRouter, Depends, HTTPException, Path, Security, status +from sqlmodel import delete, select from app.api.dependencies import get_jwt, get_organization_crud from app.crud import OrganizationCRUD -from app.models import Organization, UserRole +from app.models import Alert, AlertSequence, Organization, UserRole from app.schemas.login import TokenPayload from app.schemas.organizations import OrganizationCreate, SlackHook, TelegramChannelId from app.services.slack import slack_client @@ -71,6 +72,17 @@ async def delete_organization( telemetry_client.capture( token_payload.sub, event="organizations-deletion", properties={"organization_id": organization_id} ) + # Remove alerts and their associations for this organization to satisfy FK constraints + org_session = organizations.session + alert_ids_res = await org_session.exec(select(Alert.id).where(Alert.organization_id == organization_id)) + alert_ids = list(alert_ids_res.all()) + if alert_ids: + delete_links: Any = delete(AlertSequence).where(cast(Any, AlertSequence.alert_id).in_(alert_ids)) + delete_alerts: Any = delete(Alert).where(cast(Any, Alert.id).in_(alert_ids)) + await org_session.exec(delete_links) + await org_session.exec(delete_alerts) + await org_session.commit() + bucket_name = s3_service.resolve_bucket_name(organization_id) if not (await s3_service.delete_bucket(bucket_name)): raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to create bucket") diff --git a/src/app/api/api_v1/endpoints/sequences.py b/src/app/api/api_v1/endpoints/sequences.py index cc3ff0ac..f8fa5b7a 100644 --- a/src/app/api/api_v1/endpoints/sequences.py +++ b/src/app/api/api_v1/endpoints/sequences.py @@ -3,22 +3,24 @@ # This program is licensed under the Apache License 2.0. # See LICENSE or go to for full license details. 
-from ast import literal_eval + from datetime import date, datetime, timedelta -from operator import itemgetter -from typing import Dict, List, Tuple, Union, cast +from typing import Any, List, Union, cast +import pandas as pd from fastapi import APIRouter, Depends, HTTPException, Path, Query, Security, status -from sqlmodel import func, select +from sqlmodel import delete, func, select from sqlmodel.ext.asyncio.session import AsyncSession -from app.api.dependencies import get_camera_crud, get_detection_crud, get_jwt, get_sequence_crud -from app.crud import CameraCRUD, DetectionCRUD, SequenceCRUD +from app.api.dependencies import get_alert_crud, get_camera_crud, get_detection_crud, get_jwt, get_sequence_crud +from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, SequenceCRUD from app.db import get_session -from app.models import Camera, Detection, Sequence, UserRole -from app.schemas.detections import DetectionSequence, DetectionWithUrl +from app.models import AlertSequence, AnnotationType, Camera, Detection, Sequence, UserRole +from app.schemas.alerts import AlertCreate, AlertUpdate +from app.schemas.detections import DetectionRead, DetectionSequence, DetectionWithUrl from app.schemas.login import TokenPayload -from app.schemas.sequences import SequenceLabel, SequenceWithCone +from app.schemas.sequences import SequenceLabel, SequenceRead +from app.services.overlap import compute_overlap from app.services.storage import s3_service from app.services.telemetry import telemetry_client @@ -33,54 +35,51 @@ async def verify_org_rights( raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Access forbidden.") -def _resolve_cone(azimuth: float, bboxes_str: str, aov: float) -> Tuple[float, float]: - bboxes = literal_eval(bboxes_str) - # Take the bbox with the highest confidence - xmin, _, xmax, _, _ = max(bboxes, key=itemgetter(2)) - return azimuth + aov * ((xmin + xmax) / 2 - 0.5), aov * (xmax - xmin) - - -async def resolve_detection_cones( - seq_ids: List[int], session: AsyncSession = Depends(get_session) -) -> Dict[int, Tuple[float, float]]: - if not seq_ids: - return {} - - # Define a Common Table Expression (CTE) using a window function - # Partition by sequence_id, order by id ascending, assign row number - row_number_cte = ( - select( # type: ignore[call-overload] - Detection.id.label("detection_id"), # type: ignore[attr-defined] - Detection.sequence_id, - Detection.azimuth, - Detection.bboxes, - Detection.camera_id, - func.row_number() - .over( - partition_by=Detection.sequence_id, - order_by=Detection.id.asc(), # type: ignore[attr-defined] - ) - .label("rn"), # Assign row number within each sequence_id group - ) - .where(Detection.sequence_id.in_(seq_ids)) # type: ignore[union-attr] - .cte("ranked_detections") # Create a Common Table Expression +async def _refresh_alert_state(alert_id: int, session: AsyncSession, alerts: AlertCRUD) -> None: + remaining_stmt: Any = ( + select(Sequence, Camera) + .join(AlertSequence, cast(Any, AlertSequence.sequence_id) == Sequence.id) + .join(Camera, cast(Any, Camera.id) == Sequence.camera_id) ) - - # Main query: Select from the CTE, join with Camera, filter for row_number = 1 - query = ( - select(row_number_cte.c.sequence_id, row_number_cte.c.azimuth, row_number_cte.c.bboxes, Camera.angle_of_view) # type: ignore[attr-defined] - # Join the CTE results with the Camera table - .join(Camera, row_number_cte.c.camera_id == Camera.id) - # Filter the CTE results to get only the row with rn = 1 (minimum id) for each sequence - 
.where(row_number_cte.c.rn == 1) + remaining_stmt = remaining_stmt.where(AlertSequence.alert_id == alert_id) + remaining_res = await session.exec(remaining_stmt) + rows = remaining_res.all() + if not rows: + await alerts.delete(alert_id) + return + + seqs = [row[0] for row in rows] + cams = [row[1] for row in rows] + new_start = min(seq.started_at for seq in seqs) + new_last = max(seq.last_seen_at for seq in seqs) + + loc: Union[tuple[float, float], None] = None + if len(rows) >= 2: + records = [] + for seq, cam in zip(seqs, cams, strict=False): + records.append({ + "id": seq.id, + "lat": cam.lat, + "lon": cam.lon, + "sequence_azimuth": seq.sequence_azimuth, + "cone_angle": seq.cone_angle, + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + }) + df = compute_overlap(pd.DataFrame.from_records(records)) + loc = next((loc for locs in df["event_smoke_locations"].tolist() for loc in locs if loc is not None), None) + + await alerts.update( + alert_id, + AlertUpdate( + started_at=new_start, + last_seen_at=new_last, + lat=loc[0] if loc else None, + lon=loc[1] if loc else None, + ), ) - det_infos = await session.exec(query) - results = det_infos.all() - - # For each sequence, resolve the azimuth + opening angle - return {seq_id: _resolve_cone(azimuth, bboxes_str, aov) for seq_id, azimuth, bboxes_str, aov in results} - @router.get("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Fetch the information of a specific sequence") async def get_sequence( @@ -95,7 +94,7 @@ async def get_sequence( if UserRole.ADMIN not in token_payload.scopes: await verify_org_rights(token_payload.organization_id, sequence.camera_id, cameras) - return sequence + return SequenceRead(**sequence.model_dump()) @router.get( @@ -120,7 +119,7 @@ async def fetch_sequence_detections( bucket = s3_service.get_bucket(s3_service.resolve_bucket_name(camera.organization_id)) return [ DetectionWithUrl( - **elt.__dict__, + **DetectionRead(**elt.model_dump()).model_dump(), url=bucket.get_public_url(elt.bucket_key), ) for elt in await detections.fetch_all( @@ -140,13 +139,10 @@ async def fetch_sequence_detections( async def fetch_latest_unlabeled_sequences( session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[SequenceWithCone]: +) -> List[SequenceRead]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-latest") - # Limit to cameras in the same organization - # Get camera IDs for org camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) - # Get sequences for those cameras fetched_sequences = ( await session.exec( select(Sequence) @@ -157,15 +153,7 @@ async def fetch_latest_unlabeled_sequences( .limit(15) ) ).all() - if len(fetched_sequences) == 0: - return [] - det_cones = await resolve_detection_cones([elt.__dict__["id"] for elt in fetched_sequences], session) - return [ - SequenceWithCone( - **elt.__dict__, cone_azimuth=det_cones[elt.__dict__["id"]][0], cone_angle=det_cones[elt.__dict__["id"]][1] - ) - for elt in fetched_sequences - ] + return [SequenceRead(**elt.model_dump()) for elt in fetched_sequences] @router.get("/all/fromdate", status_code=status.HTTP_200_OK, summary="Fetch all the sequences for a specific date") @@ -175,7 +163,7 @@ async def fetch_sequences_from_date( offset: Union[int, None] = Query(0, description="Number of sequences to skip before starting to fetch"), session: 
AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT, UserRole.USER]), -) -> List[SequenceWithCone]: +) -> List[SequenceRead]: telemetry_client.capture(token_payload.sub, event="sequence-fetch-from-date") # Limit to cameras in the same organization camera_ids = await session.exec(select(Camera.id).where(Camera.organization_id == token_payload.organization_id)) @@ -190,15 +178,7 @@ async def fetch_sequences_from_date( .offset(offset) ) ).all() - if len(fetched_sequences) == 0: - return [] - det_cones = await resolve_detection_cones([elt.__dict__["id"] for elt in fetched_sequences], session) - return [ - SequenceWithCone( - **elt.__dict__, cone_azimuth=det_cones[elt.__dict__["id"]][0], cone_angle=det_cones[elt.__dict__["id"]][1] - ) - for elt in fetched_sequences - ] + return [SequenceRead(**elt.model_dump()) for elt in fetched_sequences] @router.delete("/{sequence_id}", status_code=status.HTTP_200_OK, summary="Delete a sequence") @@ -206,16 +186,26 @@ async def delete_sequence( sequence_id: int = Path(..., gt=0), sequences: SequenceCRUD = Depends(get_sequence_crud), detections: DetectionCRUD = Depends(get_detection_crud), + alerts: AlertCRUD = Depends(get_alert_crud), session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN]), ) -> None: telemetry_client.capture(token_payload.sub, event="sequence-deletion", properties={"sequence_id": sequence_id}) + alert_ids_res = await session.exec(select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id)) + alert_ids = list(alert_ids_res.all()) # Unset the sequence_id in the detections det_ids = await session.exec(select(Detection.id).where(Detection.sequence_id == sequence_id)) for det_id in det_ids.all(): await detections.update(det_id, DetectionSequence(sequence_id=None)) + # Drop alert links for this sequence to avoid FK issues + delete_stmt: Any = delete(AlertSequence).where(cast(Any, AlertSequence.sequence_id) == sequence_id) + await session.exec(delete_stmt) + await session.commit() # Delete the sequence await sequences.delete(sequence_id) + # Refresh affected alerts + for aid in alert_ids: + await _refresh_alert_state(aid, session, alerts) @router.patch("/{sequence_id}/label", status_code=status.HTTP_200_OK, summary="Label the nature of the sequence") @@ -224,6 +214,8 @@ async def label_sequence( sequence_id: int = Path(..., gt=0), cameras: CameraCRUD = Depends(get_camera_crud), sequences: SequenceCRUD = Depends(get_sequence_crud), + alerts: AlertCRUD = Depends(get_alert_crud), + session: AsyncSession = Depends(get_session), token_payload: TokenPayload = Security(get_jwt, scopes=[UserRole.ADMIN, UserRole.AGENT]), ) -> Sequence: telemetry_client.capture(token_payload.sub, event="sequence-label", properties={"sequence_id": sequence_id}) @@ -232,4 +224,32 @@ async def label_sequence( if UserRole.ADMIN not in token_payload.scopes: await verify_org_rights(token_payload.organization_id, sequence.camera_id, cameras) - return await sequences.update(sequence_id, payload) + updated = await sequences.update(sequence_id, payload) + + # If sequence is labeled as non-wildfire, remove it from alerts and refresh those alerts + if payload.is_wildfire is not None and payload.is_wildfire != AnnotationType.WILDFIRE_SMOKE: + alert_ids_res = await session.exec( + select(AlertSequence.alert_id).where(AlertSequence.sequence_id == sequence_id) + ) + alert_ids = list(alert_ids_res.all()) + if alert_ids: + delete_links: Any 
= delete(AlertSequence).where(cast(Any, AlertSequence.sequence_id) == sequence_id) + await session.exec(delete_links) + await session.commit() + for aid in alert_ids: + await _refresh_alert_state(aid, session, alerts) + # Create a fresh alert for this sequence alone + camera = cast(Camera, await cameras.get(sequence.camera_id, strict=True)) + new_alert = await alerts.create( + AlertCreate( + organization_id=camera.organization_id, + started_at=sequence.started_at, + last_seen_at=sequence.last_seen_at, + lat=None, + lon=None, + ) + ) + session.add(AlertSequence(alert_id=new_alert.id, sequence_id=sequence_id)) + await session.commit() + + return updated diff --git a/src/app/api/api_v1/router.py b/src/app/api/api_v1/router.py index 4651be63..c52ac49f 100644 --- a/src/app/api/api_v1/router.py +++ b/src/app/api/api_v1/router.py @@ -5,7 +5,17 @@ from fastapi import APIRouter -from app.api.api_v1.endpoints import cameras, detections, login, organizations, poses, sequences, users, webhooks +from app.api.api_v1.endpoints import ( + alerts, + cameras, + detections, + login, + organizations, + poses, + sequences, + users, + webhooks, +) api_router = APIRouter(redirect_slashes=True) api_router.include_router(login.router, prefix="/login", tags=["login"]) @@ -13,6 +23,7 @@ api_router.include_router(cameras.router, prefix="/cameras", tags=["cameras"]) api_router.include_router(poses.router, prefix="/poses", tags=["poses"]) api_router.include_router(detections.router, prefix="/detections", tags=["detections"]) +api_router.include_router(alerts.router, prefix="/alerts", tags=["alerts"]) api_router.include_router(sequences.router, prefix="/sequences", tags=["sequences"]) api_router.include_router(organizations.router, prefix="/organizations", tags=["organizations"]) api_router.include_router(webhooks.router, prefix="/webhooks", tags=["webhooks"]) diff --git a/src/app/api/dependencies.py b/src/app/api/dependencies.py index f5cfcd9d..19e993ba 100644 --- a/src/app/api/dependencies.py +++ b/src/app/api/dependencies.py @@ -15,7 +15,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession from app.core.config import settings -from app.crud import CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, UserCRUD, WebhookCRUD +from app.crud import AlertCRUD, CameraCRUD, DetectionCRUD, OrganizationCRUD, SequenceCRUD, UserCRUD, WebhookCRUD from app.crud.crud_pose import PoseCRUD from app.db import get_session from app.models import User, UserRole @@ -65,6 +65,10 @@ def get_sequence_crud(session: AsyncSession = Depends(get_session)) -> SequenceC return SequenceCRUD(session=session) +def get_alert_crud(session: AsyncSession = Depends(get_session)) -> AlertCRUD: + return AlertCRUD(session=session) + + def decode_token(token: str, authenticate_value: Union[str, None] = None) -> Dict[str, str]: try: payload = jwt_decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM]) diff --git a/src/app/crud/__init__.py b/src/app/crud/__init__.py index f343a7a8..72b96e39 100644 --- a/src/app/crud/__init__.py +++ b/src/app/crud/__init__.py @@ -5,3 +5,4 @@ from .crud_organization import * from .crud_sequence import * from .crud_webhook import * +from .crud_alert import * diff --git a/src/app/crud/base.py b/src/app/crud/base.py index adbc265b..6b7b0876 100644 --- a/src/app/crud/base.py +++ b/src/app/crud/base.py @@ -48,7 +48,7 @@ async def get(self, entry_id: int, strict: bool = False) -> Union[ModelType, Non return entry async def get_by(self, field_name: str, val: Union[str, int], strict: bool = False) -> 
Union[ModelType, None]: - statement = select(self.model).where(getattr(self.model, field_name) == val) # type: ignore[var-annotated] + statement: Any = select(self.model).where(getattr(self.model, field_name) == val) results = await self.session.exec(statement=statement) entry = results.one_or_none() if strict and entry is None: @@ -68,7 +68,7 @@ async def fetch_all( limit: Optional[int] = None, offset: Optional[int] = None, ) -> List[ModelType]: - statement = select(self.model) # type: ignore[var-annotated] + statement: Any = select(self.model) if isinstance(filters, tuple): statement = statement.where(getattr(self.model, filters[0]) == filters[1]) elif isinstance(filters, list): @@ -120,12 +120,12 @@ async def update(self, entry_id: int, payload: UpdateSchemaType) -> ModelType: async def delete(self, entry_id: int) -> None: await self.get(entry_id, strict=True) - statement = delete(self.model).where(self.model.id == entry_id) + statement = delete(self.model).where(cast(Any, self.model).id == entry_id) await self.session.exec(statement=statement) # type: ignore[call-overload] await self.session.commit() async def get_in(self, list_: List[Any], field_name: str) -> List[ModelType]: - statement = select(self.model).where(getattr(self.model, field_name).in_(list_)) # type: ignore[var-annotated] + statement: Any = select(self.model).where(getattr(self.model, field_name).in_(list_)) results = await self.session.exec(statement) - return results.all() + return list(results.all()) diff --git a/src/app/crud/crud_alert.py b/src/app/crud/crud_alert.py new file mode 100644 index 00000000..3b58fc8d --- /dev/null +++ b/src/app/crud/crud_alert.py @@ -0,0 +1,17 @@ +# Copyright (C) 2025-2026, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.crud.base import BaseCRUD +from app.models import Alert +from app.schemas.alerts import AlertCreate, AlertUpdate + +__all__ = ["AlertCRUD"] + + +class AlertCRUD(BaseCRUD[Alert, AlertCreate, AlertUpdate]): + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, Alert) diff --git a/src/app/crud/crud_sequence.py b/src/app/crud/crud_sequence.py index a501a24b..ed622e06 100644 --- a/src/app/crud/crud_sequence.py +++ b/src/app/crud/crud_sequence.py @@ -3,6 +3,7 @@ # This program is licensed under the Apache License 2.0. # See LICENSE or go to for full license details. 
+ from typing import Union from sqlmodel.ext.asyncio.session import AsyncSession diff --git a/src/app/db.py b/src/app/db.py index 52ec46e7..b56b360a 100644 --- a/src/app/db.py +++ b/src/app/db.py @@ -5,9 +5,10 @@ import asyncio import logging +from typing import Any +from sqlalchemy.ext.asyncio import async_sessionmaker from sqlalchemy.ext.asyncio.engine import AsyncEngine -from sqlalchemy.orm import sessionmaker from sqlmodel import SQLModel, create_engine, select from sqlmodel.ext.asyncio.session import AsyncSession @@ -23,7 +24,7 @@ async def get_session() -> AsyncSession: # type: ignore[misc] - async_session = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) + async_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) async with async_session() as session: yield session @@ -36,9 +37,9 @@ async def init_db() -> None: logger.info("Initializing PostgreSQL database...") # Create the superadmin organization - statement = select(Organization).where(Organization.name == settings.SUPERADMIN_ORG) # type: ignore[var-annotated] - results = await session.execute(statement=statement) - organization = results.scalar_one_or_none() + org_stmt: Any = select(Organization).where(Organization.name == settings.SUPERADMIN_ORG) + org_results = await session.exec(statement=org_stmt) + organization = org_results.one_or_none() if not organization: new_orga = Organization(name=settings.SUPERADMIN_ORG) session.add(new_orga) @@ -51,9 +52,9 @@ async def init_db() -> None: s3_service.create_bucket(s3_service.resolve_bucket_name(organization_id)) # Check if admin exists - statement = select(User).where(User.login == settings.SUPERADMIN_LOGIN) - results = await session.exec(statement=statement) - user = results.one_or_none() + user_stmt: Any = select(User).where(User.login == settings.SUPERADMIN_LOGIN) + user_results = await session.exec(statement=user_stmt) + user = user_results.one_or_none() if not user: pwd = hash_password(settings.SUPERADMIN_PWD) session.add( diff --git a/src/app/main.py b/src/app/main.py index 73487361..63065c1f 100644 --- a/src/app/main.py +++ b/src/app/main.py @@ -80,7 +80,7 @@ async def add_process_time_header(request: Request, call_next): ) if isinstance(settings.SENTRY_DSN, str): - # Sentry middleware is compatible at runtime; ignore type mismatch from Starlette signature + # Sentry middleware is compatible at runtime app.add_middleware(SentryAsgiMiddleware) # type: ignore[arg-type] diff --git a/src/app/models.py b/src/app/models.py index 40ec1e0d..301e19ab 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ -11,7 +11,7 @@ from app.core.config import settings -__all__ = ["Camera", "Detection", "Organization", "Pose", "Sequence", "User"] +__all__ = ["Alert", "AlertSequence", "Camera", "Detection", "Organization", "Pose", "Sequence", "User"] class UserRole(str, Enum): @@ -84,12 +84,30 @@ class Sequence(SQLModel, table=True): id: int = Field(None, primary_key=True) camera_id: int = Field(..., foreign_key="cameras.id", nullable=False) pose_id: Union[int, None] = Field(None, foreign_key="poses.id", nullable=True) - azimuth: float = Field(..., ge=0, lt=360) + camera_azimuth: float = Field(..., ge=0, lt=360) is_wildfire: Union[AnnotationType, None] = None + sequence_azimuth: Union[float, None] = Field(None, nullable=True) + cone_angle: Union[float, None] = Field(None, nullable=True) started_at: datetime = Field(..., nullable=False) last_seen_at: datetime = Field(..., nullable=False) +class Alert(SQLModel, table=True): + __tablename__ = 
"alerts" + id: int = Field(None, primary_key=True) + organization_id: int = Field(..., foreign_key="organizations.id", nullable=False) + lat: Union[float, None] = Field(default=None, gt=-90, lt=90, nullable=True) + lon: Union[float, None] = Field(default=None, gt=-180, lt=180, nullable=True) + started_at: datetime = Field(..., nullable=False) + last_seen_at: datetime = Field(..., nullable=False) + + +class AlertSequence(SQLModel, table=True): + __tablename__ = "alerts_sequences" + alert_id: int = Field(primary_key=True, foreign_key="alerts.id") + sequence_id: int = Field(primary_key=True, foreign_key="sequences.id") + + class Organization(SQLModel, table=True): __tablename__ = "organizations" id: int = Field(None, primary_key=True) diff --git a/src/app/schemas/__init__.py b/src/app/schemas/__init__.py index 93d4e0ea..46c37536 100644 --- a/src/app/schemas/__init__.py +++ b/src/app/schemas/__init__.py @@ -7,3 +7,4 @@ from .organizations import * from .sequences import * from .webhooks import * +from .alerts import * diff --git a/src/app/schemas/alerts.py b/src/app/schemas/alerts.py new file mode 100644 index 00000000..74b752f1 --- /dev/null +++ b/src/app/schemas/alerts.py @@ -0,0 +1,33 @@ +# Copyright (C) 2025-2026, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + +__all__ = ["AlertBase", "AlertCreate", "AlertRead", "AlertUpdate"] + + +class AlertBase(BaseModel): + organization_id: Optional[int] = Field(None, gt=0) + lat: Optional[float] = None + lon: Optional[float] = None + started_at: Optional[datetime] = None + last_seen_at: Optional[datetime] = None + + +class AlertCreate(AlertBase): + organization_id: int = Field(..., gt=0) + started_at: datetime + last_seen_at: datetime + + +class AlertUpdate(AlertBase): + pass + + +class AlertRead(AlertCreate): + id: int diff --git a/src/app/schemas/detections.py b/src/app/schemas/detections.py index 19ece964..39c337b9 100644 --- a/src/app/schemas/detections.py +++ b/src/app/schemas/detections.py @@ -11,7 +11,7 @@ from app.core.config import settings from app.models import AnnotationType, Detection -__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionUrl"] +__all__ = ["Azimuth", "DetectionCreate", "DetectionLabel", "DetectionRead", "DetectionUrl", "DetectionWithUrl"] class DetectionLabel(BaseModel): @@ -52,6 +52,10 @@ class DetectionUrl(BaseModel): url: str = Field(..., description="temporary URL to access the media content") +class DetectionRead(Detection): + pass + + class DetectionWithUrl(Detection): url: str = Field(..., description="temporary URL to access the media content") diff --git a/src/app/schemas/sequences.py b/src/app/schemas/sequences.py index 834d2958..2555dd57 100644 --- a/src/app/schemas/sequences.py +++ b/src/app/schemas/sequences.py @@ -9,7 +9,7 @@ from app.models import AnnotationType, Sequence -__all__ = ["SequenceUpdate", "SequenceWithCone"] +__all__ = ["SequenceLabel", "SequenceRead", "SequenceUpdate"] # Accesses @@ -21,6 +21,5 @@ class SequenceLabel(BaseModel): is_wildfire: AnnotationType -class SequenceWithCone(Sequence): - cone_azimuth: float - cone_angle: float +class SequenceRead(Sequence): + pass diff --git a/src/app/services/cones.py b/src/app/services/cones.py new file mode 100644 index 00000000..67ea37e0 --- /dev/null +++ b/src/app/services/cones.py @@ -0,0 +1,17 @@ +# Copyright (C) 2025-2026, Pyronear. 
+ +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from ast import literal_eval +from operator import itemgetter +from typing import Tuple + + +def resolve_cone(azimuth: float, bboxes_str: str, aov: float) -> Tuple[float, float]: + """Compute the cone azimuth and opening angle using the most confident bbox.""" + bboxes = literal_eval(bboxes_str) + # Confidence is the fifth element of each (xmin, ymin, xmax, ymax, conf) tuple + xmin, _, xmax, _, _ = max(bboxes, key=itemgetter(4)) + cone_azimuth = round(azimuth + aov * ((xmin + xmax) / 2 - 0.5), 1) + cone_angle = round(aov * (xmax - xmin), 1) + return cone_azimuth, cone_angle diff --git a/src/app/services/overlap.py b/src/app/services/overlap.py new file mode 100644 index 00000000..376ee25a --- /dev/null +++ b/src/app/services/overlap.py @@ -0,0 +1,387 @@ +# Copyright (C) 2020-2026, Pyronear. + +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + + +from __future__ import annotations + +import itertools +import logging +from collections import defaultdict +from math import atan2, cos, radians, sin, sqrt +from typing import Dict, List, Optional, Tuple + +import networkx as nx # type: ignore +import numpy as np +import pandas as pd +import pyproj +from geopy.distance import geodesic +from pyproj import Transformer +from shapely.geometry import Polygon +from shapely.geometry.base import BaseGeometry +from shapely.ops import transform as shapely_transform + +logger = logging.getLogger(__name__) + + +def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float: + """ + Compute the great circle distance between two points on the Earth's surface using the Haversine formula. + + Parameters + ---------- + lat1 : float + Latitude of point 1 in decimal degrees. + lon1 : float + Longitude of point 1 in decimal degrees. + lat2 : float + Latitude of point 2 in decimal degrees. + lon2 : float + Longitude of point 2 in decimal degrees. + + Returns + ------- + float + Distance between the two points in kilometers. + """ + r_earth = 6371.0 + dlat = radians(lat2 - lat1) + dlon = radians(lon2 - lon1) + a = sin(dlat / 2) ** 2 + cos(radians(lat1)) * cos(radians(lat2)) * sin(dlon / 2) ** 2 + c = 2 * atan2(sqrt(a), sqrt(1 - a)) + return r_earth * c + + +def get_centroid_latlon(geom: BaseGeometry) -> Tuple[float, float]: + """ + Compute the geographic coordinates of the centroid of a given geometry. + + Parameters + ---------- + geom : BaseGeometry + Geometry in EPSG:3857 (Web Mercator projection). + + Returns + ------- + tuple[float, float] + Latitude and longitude of the centroid in EPSG:4326. + """ + centroid = geom.centroid + transformer = pyproj.Transformer.from_crs("EPSG:3857", "EPSG:4326", always_xy=True) + lon, lat = transformer.transform(centroid.x, centroid.y) + return float(lat), float(lon) + + +def _build_cone_polygon( + lat: float, + lon: float, + azimuth: float, + opening_angle: float, + dist_km: float, + r_min_km: float, + resolution: int = 36, +) -> Polygon: + """ + Build a cone sector polygon on the sphere, then return it in geographic coordinates. + + Parameters + ---------- + lat : float + Camera latitude. + lon : float + Camera longitude. + azimuth : float + Cone central azimuth in degrees. + opening_angle : float + Full opening angle in degrees. + dist_km : float + Outer radius in kilometers. + r_min_km : float + Inner radius in kilometers. + resolution : int + Number of points to sample the arc. + + Returns + ------- + shapely.geometry.Polygon + Cone polygon in EPSG:4326 coordinates.
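+ + Examples + -------- + A minimal sketch with assumed coordinates, building a 30 degree sector facing north (no inner radius): + + >>> poly = _build_cone_polygon(lat=48.4, lon=2.7, azimuth=0.0, opening_angle=30.0, dist_km=10.0, r_min_km=0.0) + >>> poly.geom_type + 'Polygon' + >>> poly.is_valid + True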
+ """ + half_angle = opening_angle / 2.0 + angles = np.linspace(azimuth - half_angle, azimuth + half_angle, resolution) + + # Outer arc points + outer_arc = [geodesic(kilometers=dist_km).destination((lat, lon), float(az % 360)) for az in angles] + outer_points = [(p.longitude, p.latitude) for p in outer_arc] + + if r_min_km > 0: + # Inner arc points, walk reversed so ring orientation stays valid + inner_arc = [geodesic(kilometers=r_min_km).destination((lat, lon), float(az % 360)) for az in reversed(angles)] + inner_points = [(p.longitude, p.latitude) for p in inner_arc] + # Outer ring with a hole for the inner radius + return Polygon(outer_points + inner_points, holes=[inner_points]).buffer(0) + # Triangle like sector with apex at camera position + return Polygon([(lon, lat), *outer_points]).buffer(0) + + +def _project_polygon_from_4326_to_3857(polygon: Polygon) -> Polygon: + """ + Project a polygon from EPSG:4326 to EPSG:3857. + + Parameters + ---------- + polygon : Polygon + Geometry in EPSG:4326. + + Returns + ------- + Polygon + Geometry in EPSG:3857. + """ + transformer = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True) + return shapely_transform(transformer.transform, polygon) + + +def get_projected_cone(row: pd.Series, r_km: float, r_min_km: float) -> Polygon: + """ + Build and project a detection cone to Web Mercator. + + Parameters + ---------- + row : pd.Series + Row with fields: lat, lon, sequence_azimuth, cone_angle. + r_km : float + Outer radius of the camera detection cone in kilometers. + r_min_km : float + Inner radius of the camera detection cone in kilometers. + + Returns + ------- + Polygon + Cone geometry in EPSG:3857. + """ + poly = _build_cone_polygon( + float(row["lat"]), + float(row["lon"]), + float(row["sequence_azimuth"]), + float(row["cone_angle"]), + float(r_km), + float(r_min_km), + ) + return _project_polygon_from_4326_to_3857(poly) + + +def _compute_localized_groups_from_cliques( + df: pd.DataFrame, + cliques: List[Tuple[int, ...]], + projected_cones: Dict[int, Polygon], + max_dist_km: float, +) -> List[Tuple[int, ...]]: + """ + From maximal cliques, split each clique into localized groups. + + Rules + ----- + For groups with size at least three, keep the whole group if the maximum distance + among all pair intersection barycenters is within max_dist_km. Otherwise split the + clique into all two by two pairs. + + Parameters + ---------- + df : pd.DataFrame + Source sequences, must contain column id. + cliques : list[tuple[int, ...]] + Maximal cliques computed from the overlap graph. + projected_cones : dict[int, Polygon] + Mapping from sequence id to its cone geometry in EPSG:3857. + max_dist_km : float + Maximum allowed distance between pair barycenters to keep a group. + + Returns + ------- + list[tuple[int, ...]] + Unique localized groups as sorted tuples, with strict subsets removed. 
+ """ + base = [tuple(sorted(g)) for g in cliques] + ids_in_cliques = {x for g in base for x in g} + all_ids = set(df["id"].astype(int).tolist()) + work = base + [(sid,) for sid in sorted(all_ids - ids_in_cliques)] + + def split_one_group(group: Tuple[int, ...]) -> List[Tuple[int, ...]]: + group = tuple(sorted(group)) + if len(group) <= 1: + return [group] + + # Collect pairwise intersection barycenters + pair_barys: List[Tuple[float, float]] = [] + for i, j in itertools.combinations(group, 2): + gi = projected_cones.get(i) + gj = projected_cones.get(j) + if gi is None or gj is None: + continue + inter = gi.intersection(gj) + if inter.is_empty or inter.area <= 0: + continue + pair_barys.append(get_centroid_latlon(inter)) + + if len(group) == 2: + return [group] + + if len(pair_barys) < 2: + # Not enough info to validate locality, fall back to all pairs + return [tuple(sorted(p)) for p in itertools.combinations(group, 2)] + + # Diameter of barycenters + max_d = 0.0 + for (lat1, lon1), (lat2, lon2) in itertools.combinations(pair_barys, 2): + d = haversine_km(lat1, lon1, lat2, lon2) + if d > max_d: + max_d = d + + if max_d <= max_dist_km: + return [group] + return [tuple(sorted(p)) for p in itertools.combinations(group, 2)] + + # Build candidate groups from all cliques + candidates: List[Tuple[int, ...]] = [] + for clique in sorted(set(work)): + candidates.extend(split_one_group(clique)) + + # Remove exact duplicates + candidates = sorted({tuple(sorted(g)) for g in candidates}) + + # Drop strict subsets of any other group + keep: List[Tuple[int, ...]] = [] + as_sets = [set(g) for g in candidates] + for i, gi in enumerate(as_sets): + if any(i != j and gi.issubset(as_sets[j]) for j in range(len(as_sets))): + continue + keep.append(candidates[i]) + + return keep + + +def compute_overlap( + api_sequences: pd.DataFrame, + r_km: float = 35.0, + r_min_km: float = 0.5, + max_dist_km: float = 2.0, +) -> pd.DataFrame: + """ + Build localized event groups and attach them to the input DataFrame. + + This function sets two columns on the returned DataFrame: + event_groups: list of tuples of sequence ids + event_smoke_locations: list of (lat, lon), same order as event_groups + + Parameters + ---------- + api_sequences : pd.DataFrame + Input with fields: id, lat, lon, sequence_azimuth, cone_angle, is_wildfire, + started_at, last_seen_at. + r_km : float + Outer radius of the camera detection cone in kilometers. + r_min_km : float + Inner radius of the camera detection cone in kilometers. + max_dist_km : float + Maximum allowed distance between pair intersection barycenters to keep a group. + + Returns + ------- + pd.DataFrame + DataFrame copy including event_groups and event_smoke_locations columns. 
+ """ + df = api_sequences.copy() + df["id"] = df["id"].astype(int) + df["started_at"] = pd.to_datetime(df["started_at"]) + df["last_seen_at"] = pd.to_datetime(df["last_seen_at"]) + + # keep positives and unknowns + df_valid = df[df["is_wildfire"].isin([None, "wildfire_smoke"])] + + if df_valid.empty: + df["event_groups"] = df["id"].astype(int).map(lambda sid: [(sid,)]) + df["event_smoke_locations"] = [[] for _ in range(len(df))] + return df + + # Precompute cones in Web Mercator + projected_cones: Dict[int, Polygon] = {} + for _, row in df_valid.iterrows(): + sid = int(row["id"]) + try: + projected_cones[sid] = get_projected_cone(row, r_km, r_min_km) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed to build cone for sequence %s: %s", sid, exc) + + # Phase 1, build overlap graph gated by time overlap + ids = df_valid["id"].astype(int).tolist() + rows_by_id: Dict[int, Dict[str, pd.Timestamp]] = df_valid.set_index("id")[["started_at", "last_seen_at"]].to_dict( + "index" + ) + + overlapping_pairs: List[Tuple[int, int]] = [] + for i, id1 in enumerate(ids): + row1 = rows_by_id[id1] + for id2 in ids[i + 1 :]: + row2 = rows_by_id[id2] + # Require overlapping time windows + if row1["started_at"] > row2["last_seen_at"] or row2["started_at"] > row1["last_seen_at"]: + continue + # Spatial overlap test + if projected_cones[id1].intersects(projected_cones[id2]): + overlapping_pairs.append((id1, id2)) + + graph = nx.Graph() + graph.add_edges_from(overlapping_pairs) + cliques = [tuple(sorted(c)) for c in nx.find_cliques(graph) if len(c) >= 2] + + # Phase 2, localized groups from cliques + localized_groups = _compute_localized_groups_from_cliques(df, cliques, projected_cones, max_dist_km) + + # Per group localization, median of pair barycenters for robustness + def group_smoke_location(seq_tuple: Tuple[int, ...]) -> Optional[Tuple[float, float]]: + if len(seq_tuple) < 2: + return None + pts: List[Tuple[float, float]] = [] + for i, j in itertools.combinations(seq_tuple, 2): + gi = projected_cones.get(i) + gj = projected_cones.get(j) + if gi is None or gj is None: + continue + inter = gi.intersection(gj) + if inter.is_empty or inter.area <= 0: + continue + pts.append(get_centroid_latlon(inter)) + if not pts: + # No intersections: use centroid of available cones as best-effort location + polys: List[BaseGeometry] = [p for p in (projected_cones.get(sid) for sid in seq_tuple) if p is not None] + if not polys: + return None + try: + merged: BaseGeometry = polys[0] + for p in polys[1:]: + merged = merged.union(p) + return get_centroid_latlon(merged) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed fallback centroid for group %s: %s", seq_tuple, exc) + return None + lats, lons = zip(*pts, strict=False) + return float(np.median(lats)), float(np.median(lons)) + + group_to_smoke: Dict[Tuple[int, ...], Optional[Tuple[float, float]]] = { + g: group_smoke_location(g) for g in localized_groups + } + + # Attach back to df + seq_to_groups: Dict[int, List[Tuple[int, ...]]] = defaultdict(list) + seq_to_smokes: Dict[int, List[Optional[Tuple[float, float]]]] = defaultdict(list) + for g in localized_groups: + smo = group_to_smoke[g] + for sid in g: + seq_to_groups[sid].append(g) + seq_to_smokes[sid].append(smo) + + df["event_groups"] = df["id"].astype(int).map(lambda sid: seq_to_groups.get(sid, [(sid,)])) + df["event_smoke_locations"] = df["id"].astype(int).map(lambda sid: seq_to_smokes.get(sid, [])) + + return df diff --git a/src/tests/conftest.py b/src/tests/conftest.py index 
2f4d930e..b583b686 100644 --- a/src/tests/conftest.py +++ b/src/tests/conftest.py @@ -164,8 +164,10 @@ "id": 1, "camera_id": 1, "pose_id": 1, - "azimuth": 43.7, + "camera_azimuth": 43.7, "is_wildfire": "wildfire_smoke", + "sequence_azimuth": 34.6, + "cone_angle": 54.8, "started_at": datetime.strptime("2023-11-07T15:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T15:28:19.226673", dt_format), }, @@ -173,8 +175,10 @@ "id": 2, "camera_id": 2, "pose_id": 3, - "azimuth": 74.8, + "camera_azimuth": 74.8, "is_wildfire": None, + "sequence_azimuth": 65.7, + "cone_angle": 54.8, "started_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), "last_seen_at": datetime.strptime("2023-11-07T16:08:19.226673", dt_format), }, diff --git a/src/tests/endpoints/test_alerts.py b/src/tests/endpoints/test_alerts.py new file mode 100644 index 00000000..534fc4b2 --- /dev/null +++ b/src/tests/endpoints/test_alerts.py @@ -0,0 +1,303 @@ +# Copyright (C) 2025-2026, Pyronear. +# +# This program is licensed under the Apache License 2.0. +# See LICENSE or go to for full license details. + +from datetime import datetime, timedelta +from typing import Any, List, Tuple, cast + +import pandas as pd +import pytest # type: ignore +from httpx import AsyncClient +from sqlmodel import select +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.core.config import settings +from app.models import Alert, AlertSequence, AnnotationType, Camera, Organization, Sequence +from app.services.overlap import compute_overlap + + +async def _create_alert_with_sequences( + session: AsyncSession, org_id: int, camera_id: int, lat: float, lon: float +) -> Tuple[Alert, List[int]]: + now = datetime.utcnow() + seq_payloads = [ + { + "camera_id": camera_id, + "pose_id": None, + "camera_azimuth": 180.0, + "is_wildfire": None, + "sequence_azimuth": 163.4, + "cone_angle": 1.0, + }, + { + "camera_id": camera_id, + "pose_id": None, + "camera_azimuth": 25.0, + "is_wildfire": None, + "sequence_azimuth": 8.3, + "cone_angle": 0.8, + }, + { + "camera_id": camera_id, + "pose_id": None, + "camera_azimuth": 276.0, + "is_wildfire": None, + "sequence_azimuth": 276.5, + "cone_angle": 3.0, + }, + ] + sequences: List[Sequence] = [] + for idx, payload in enumerate(seq_payloads): + seq = Sequence( + **payload, + started_at=now - timedelta(seconds=10 * (idx + 1)), + last_seen_at=now - timedelta(seconds=idx), + ) + session.add(seq) + sequences.append(seq) + await session.commit() + for seq in sequences: + await session.refresh(seq) + + alert = Alert( + organization_id=org_id, + lat=lat, + lon=lon, + started_at=min(seq.started_at for seq in sequences), + last_seen_at=max(seq.last_seen_at for seq in sequences), + ) + session.add(alert) + await session.commit() + await session.refresh(alert) + + for seq in sequences: + session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id)) + await session.commit() + return alert, [seq.id for seq in sequences] + + +@pytest.mark.asyncio +async def test_get_alert_and_sequences(async_client: AsyncClient, detection_session: AsyncSession): + alert, _seq_ids = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + + resp = await async_client.get(f"/alerts/{alert.id}", headers=auth) + assert resp.status_code == 200, resp.text + assert resp.json()["id"] == alert.id + assert resp.json()["lat"] == 
pytest.approx(alert.lat) + assert resp.json()["lon"] == pytest.approx(alert.lon) + assert resp.json()["started_at"] == alert.started_at.isoformat() + assert resp.json()["last_seen_at"] == alert.last_seen_at.isoformat() + + resp = await async_client.get(f"/alerts/{alert.id}/sequences?limit=5&desc=true", headers=auth) + assert resp.status_code == 200, resp.text + returned = resp.json() + last_seen_times = [item["last_seen_at"] for item in returned] + assert last_seen_times == sorted(last_seen_times, reverse=True) + + +@pytest.mark.asyncio +async def test_alerts_unlabeled_latest(async_client: AsyncClient, detection_session: AsyncSession): + alert, _ = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + resp = await async_client.get("/alerts/unlabeled/latest", headers=auth) + assert resp.status_code == 200, resp.text + payload = resp.json() + assert any(item["id"] == alert.id for item in payload) + returned = next(item for item in payload if item["id"] == alert.id) + assert returned["lat"] == pytest.approx(alert.lat) + assert returned["lon"] == pytest.approx(alert.lon) + assert returned["started_at"] == alert.started_at.isoformat() + assert returned["last_seen_at"] == alert.last_seen_at.isoformat() + + +@pytest.mark.asyncio +async def test_alerts_from_date(async_client: AsyncClient, detection_session: AsyncSession): + alert, _ = await _create_alert_with_sequences( + detection_session, org_id=1, camera_id=1, lat=48.3856355, lon=2.7323256 + ) + date_str = alert.started_at.date().isoformat() + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + resp = await async_client.get(f"/alerts/all/fromdate?from_date={date_str}", headers=auth) + assert resp.status_code == 200, resp.text + assert any(item["id"] == alert.id for item in resp.json()) + + # Ensure order is by started_at desc + returned = resp.json() + started_times = [item["started_at"] for item in returned] + assert started_times == sorted(started_times, reverse=True) + + +@pytest.mark.asyncio +async def test_triangulation_creates_single_alert( + async_client: AsyncClient, detection_session: AsyncSession, mock_img: bytes +): + organization = await detection_session.get(Organization, 1) + assert organization is not None + organization.name = "sdis-77" + detection_session.add(organization) + await detection_session.commit() + await detection_session.refresh(organization) + + camera_specs = [ + { + "name": "croix-augas", + "lat": 48.4267, + "lon": 2.7109, + "azimuth": 190.0, + "bboxes": "[(0,0.530,0.018,0.553,0.183)]", + }, + { + "name": "nemours", + "lat": 48.2605, + "lon": 2.7064, + "azimuth": 25.0, + "bboxes": "[(0.184,0.425,0.199,0.447,0.557)]", + }, + { + "name": "moret-sur-loing", + "lat": 48.3792, + "lon": 2.8208, + "azimuth": 280.0, + "bboxes": "[(0.408,0.462,0.463,0.496,0.498)]", + }, + ] + cameras: List[Camera] = [] + for spec in camera_specs: + camera = Camera( + organization_id=organization.id, + name=spec["name"], + angle_of_view=54.2, + elevation=110.0, + lat=spec["lat"], + lon=spec["lon"], + is_trustable=True, + ) + detection_session.add(camera) + cameras.append(camera) + await detection_session.commit() + for camera in cameras: + await detection_session.refresh(camera) + + for _ in range(settings.SEQUENCE_MIN_INTERVAL_DETS): + for camera, spec 
in zip(cameras, camera_specs, strict=False): + auth = pytest.get_token(camera.id, ["camera"], organization.id) + response = await async_client.post( + "/detections", + data={"azimuth": spec["azimuth"], "bboxes": spec["bboxes"]}, + files={"file": ("logo.png", mock_img, "image/png")}, + headers=auth, + ) + assert response.status_code == 201, response.text + + camera_ids = [camera.id for camera in cameras] + seqs_res = await detection_session.exec( + select(Sequence).where(cast(Any, Sequence.camera_id).in_(camera_ids)).execution_options(populate_existing=True) + ) + sequences = sorted(seqs_res.all(), key=lambda seq: seq.id) + assert len(sequences) == len(cameras) + + seq_ids = {seq.id for seq in sequences} + mappings_res = await detection_session.exec( + select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + cast(Any, AlertSequence.sequence_id).in_(list(seq_ids)) + ) + ) + mappings = set(mappings_res.all()) + alert_ids = {aid for aid, _ in mappings} + assert len(alert_ids) == 1 + assert {sid for _, sid in mappings} == seq_ids + + alert_res = await detection_session.exec(select(Alert).where(Alert.id == next(iter(alert_ids)))) + alert = alert_res.one() + assert alert.organization_id == organization.id + + camera_by_id = {camera.id: camera for camera in cameras} + records = [ + { + "id": seq.id, + "lat": camera_by_id[seq.camera_id].lat, + "lon": camera_by_id[seq.camera_id].lon, + "sequence_azimuth": seq.sequence_azimuth, + "cone_angle": seq.cone_angle, + "is_wildfire": seq.is_wildfire, + "started_at": seq.started_at, + "last_seen_at": seq.last_seen_at, + } + for seq in sequences + ] + df = compute_overlap(pd.DataFrame.from_records(records)) + expected_loc = None + for groups, locations in zip(df["event_groups"], df["event_smoke_locations"], strict=False): + for idx, group in enumerate(groups): + if set(group) == seq_ids: + if idx < len(locations): + expected_loc = locations[idx] + break + if expected_loc is not None: + break + + assert expected_loc is not None + assert alert.lat == pytest.approx(expected_loc[0]) + assert alert.lon == pytest.approx(expected_loc[1]) + + auth = pytest.get_token( + pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"] + ) + initial_alert_id = next(iter(alert_ids)) + initial_mappings = set(mappings) + + resp = await async_client.patch( + f"/sequences/{sequences[0].id}/label", + json={"is_wildfire": AnnotationType.WILDFIRE_SMOKE.value}, + headers=auth, + ) + assert resp.status_code == 200, resp.text + + mappings_res = await detection_session.exec( + select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + cast(Any, AlertSequence.sequence_id).in_(list(seq_ids)) + ) + ) + mappings_after_wildfire = set(mappings_res.all()) + alert_ids_after_wildfire = {aid for aid, _ in mappings_after_wildfire} + assert alert_ids_after_wildfire == {initial_alert_id} + assert mappings_after_wildfire == initial_mappings + + resp = await async_client.patch( + f"/sequences/{sequences[1].id}/label", + json={"is_wildfire": AnnotationType.OTHER_SMOKE.value}, + headers=auth, + ) + assert resp.status_code == 200, resp.text + + mappings_res = await detection_session.exec( + select(AlertSequence.alert_id, AlertSequence.sequence_id).where( + cast(Any, AlertSequence.sequence_id).in_(list(seq_ids)) + ) + ) + mappings_after_other = set(mappings_res.all()) + alert_ids_after_other = {aid for aid, _ in mappings_after_other} + assert len(alert_ids_after_other) == 2 + new_alert_ids = alert_ids_after_other - {initial_alert_id} + assert 
len(new_alert_ids) == 1 + new_alert_id = next(iter(new_alert_ids)) + + assert {sid for aid, sid in mappings_after_other if aid == new_alert_id} == {sequences[1].id} + remaining_ids = {seq.id for seq in sequences if seq.id != sequences[1].id} + updated_mappings = {(aid, sid) for aid, sid in mappings_after_other if aid == initial_alert_id} + assert updated_mappings == {(initial_alert_id, sid) for sid in remaining_ids} diff --git a/src/tests/endpoints/test_detections.py b/src/tests/endpoints/test_detections.py index dd4864b1..42d5225c 100644 --- a/src/tests/endpoints/test_detections.py +++ b/src/tests/endpoints/test_detections.py @@ -1,9 +1,17 @@ +from datetime import datetime, timedelta from typing import Any, Dict, List, Union -import pytest +import pytest # type: ignore from httpx import AsyncClient +from sqlmodel import select from sqlmodel.ext.asyncio.session import AsyncSession +from app.api.api_v1.endpoints.detections import _attach_sequence_to_alert +from app.core.config import settings +from app.crud import AlertCRUD, CameraCRUD, SequenceCRUD +from app.models import AlertSequence, Camera, Detection, Sequence +from app.services.cones import resolve_cone + @pytest.mark.parametrize( ("user_idx", "cam_idx", "payload", "status_code", "status_detail", "repeat"), @@ -253,3 +261,107 @@ async def test_delete_detection( assert response.json()["detail"] == status_detail if response.status_code // 100 == 2: assert response.json() is None + + +@pytest.mark.asyncio +async def test_create_detection_creates_sequence( + async_client: AsyncClient, detection_session: AsyncSession, monkeypatch +): + # Force sequence creation on first detection + monkeypatch.setattr(settings, "SEQUENCE_MIN_INTERVAL_DETS", 1) + mock_img = b"img" + auth = pytest.get_token(pytest.camera_table[0]["id"], ["camera"], pytest.camera_table[0]["organization_id"]) + payload = { + "azimuth": 120.0, + "pose_id": None, + "bboxes": "[(0.1,0.1,0.2,0.2,0.9)]", + } + resp = await async_client.post( + "/detections", data=payload, files={"file": ("img.png", mock_img, "image/png")}, headers=auth + ) + assert resp.status_code == 201, resp.text + data = resp.json() + assert data["sequence_id"] is not None + + seq_res = await detection_session.get(Sequence, data["sequence_id"]) + assert seq_res is not None + assert seq_res.sequence_azimuth is not None + assert seq_res.cone_angle is not None + camera = await detection_session.get(Camera, pytest.camera_table[0]["id"]) + assert camera is not None + expected_sequence_azimuth, expected_cone_angle = resolve_cone( + float(payload["azimuth"] if payload["azimuth"] is not None else 0.0), + str(payload["bboxes"]), + camera.angle_of_view, + ) + assert seq_res.sequence_azimuth == pytest.approx(expected_sequence_azimuth) + assert seq_res.cone_angle == pytest.approx(expected_cone_angle) + # Detection references the sequence + det_res = await detection_session.get(Detection, data["id"]) + assert det_res is not None + assert det_res.sequence_id == seq_res.id + + +@pytest.mark.asyncio +async def test_attach_sequence_to_alert_creates_alert(detection_session: AsyncSession): + seq_crud = SequenceCRUD(detection_session) + alert_crud = AlertCRUD(detection_session) + cam_crud = CameraCRUD(detection_session) + now = datetime.utcnow() + cam1 = await detection_session.get(Camera, 1) + assert cam1 is not None + cam2 = Camera( + organization_id=1, + name="cam-3", + angle_of_view=90.0, + elevation=100.0, + lat=3.7, + lon=-45.0, + is_trustable=True, + last_active_at=now, + last_image=None, + created_at=now, + ) + 
diff --git a/src/tests/endpoints/test_sequences.py b/src/tests/endpoints/test_sequences.py
index bf9e8846..6ba3022e 100644
--- a/src/tests/endpoints/test_sequences.py
+++ b/src/tests/endpoints/test_sequences.py
@@ -1,9 +1,14 @@
+from datetime import datetime, timedelta
 from typing import Any, Dict, List, Union
 
-import pytest
+import pytest  # type: ignore
 from httpx import AsyncClient
+from sqlmodel import select
 from sqlmodel.ext.asyncio.session import AsyncSession
 
+from app.models import Alert, AlertSequence, Camera, Detection, Sequence
+from app.schemas.sequences import SequenceLabel
+
 
 @pytest.mark.parametrize(
     ("user_idx", "sequence_id", "status_code", "status_detail", "expected_result"),
@@ -176,11 +181,8 @@ async def test_fetch_sequences_from_date(
     if isinstance(status_detail, str):
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
-        # Compare without cone_azimuth and cone_angle
-        assert [
-            {k: v for k, v in item.items() if k not in {"cone_azimuth", "cone_angle"}} for item in response.json()
-        ] == expected_result
-        assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json())
+        assert response.json() == expected_result
+        assert all(isinstance(elt["sequence_azimuth"], float) for elt in response.json())
         assert all(isinstance(elt["cone_angle"], float) for elt in response.json())
@@ -216,9 +218,162 @@ async def test_latest_sequences(
     if isinstance(status_detail, str):
         assert response.json()["detail"] == status_detail
     if response.status_code // 100 == 2:
-        # Compare without cone_azimuth and cone_angle
-        assert [
-            {k: v for k, v in item.items() if k not in {"cone_azimuth", "cone_angle"}} for item in response.json()
-        ] == expected_result
-        assert all(isinstance(elt["cone_azimuth"], float) for elt in response.json())
+        assert response.json() == expected_result
+        assert all(isinstance(elt["sequence_azimuth"], float) for elt in response.json())
         assert all(isinstance(elt["cone_angle"], float) for elt in response.json())
+
+
+@pytest.mark.asyncio
+async def test_sequence_label_updates_alerts(async_client: AsyncClient, detection_session: AsyncSession):
+    # Create two sequences linked to a camera and a shared alert
+    camera = await detection_session.get(Camera, 1)
+    assert camera is not None
+    now = datetime.utcnow()
+    seq1 = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        camera_azimuth=180.0,
+        sequence_azimuth=170.0,
+        cone_angle=5.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=30),
+        last_seen_at=now - timedelta(seconds=20),
+    )
+    seq2 = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        camera_azimuth=182.0,
+        sequence_azimuth=172.0,
+        cone_angle=5.0,
+        is_wildfire=None,
+        started_at=now - timedelta(seconds=25),
+        last_seen_at=now - timedelta(seconds=10),
+    )
+    detection_session.add(seq1)
+    detection_session.add(seq2)
+    await detection_session.commit()
+    await detection_session.refresh(seq1)
+    await detection_session.refresh(seq2)
+
+    alert = Alert(
+        organization_id=camera.organization_id,
+        lat=1.0,
+        lon=2.0,
+        started_at=min(seq1.started_at, seq2.started_at),
+        last_seen_at=max(seq1.last_seen_at, seq2.last_seen_at),
+    )
+    detection_session.add(alert)
+    await detection_session.commit()
+    await detection_session.refresh(alert)
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq1.id))
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq2.id))
+    await detection_session.commit()
+
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
+
+    # Keep original timings to avoid accessing expired ORM objects
+    seq1_start, seq1_last = seq1.started_at, seq1.last_seen_at
+    seq2_start, seq2_last = seq2.started_at, seq2.last_seen_at
+
+    resp = await async_client.patch(
+        f"/sequences/{seq1.id}/label",
+        json={"is_wildfire": SequenceLabel(is_wildfire="other_smoke").is_wildfire},
+        headers=auth,
+    )
+    assert resp.status_code == 200, resp.text
+
+    alerts_res = await detection_session.exec(select(Alert).execution_options(populate_existing=True))
+    alerts_rows = alerts_res.all()
+    assert len(alerts_rows) == 2
+    mappings_res = await detection_session.exec(
+        select(AlertSequence.alert_id, AlertSequence.sequence_id).execution_options(populate_existing=True)
+    )
+    mappings = {(aid, sid) for aid, sid in mappings_res.all()}
+
+    row_by_id = {row.id: row for row in alerts_rows}
+    new_alert_row = next(row for aid, row in row_by_id.items() if aid != alert.id)
+    updated_alert_row = row_by_id[alert.id]
+
+    assert (new_alert_row.id, seq1.id) in mappings
+    assert (updated_alert_row.id, seq1.id) not in mappings
+    assert (updated_alert_row.id, seq2.id) in mappings
+
+    assert updated_alert_row.started_at == seq2_start
+    assert updated_alert_row.last_seen_at == seq2_last
+    assert updated_alert_row.lat is None
+    assert updated_alert_row.lon is None
+
+    assert new_alert_row.started_at == seq1_start
+    assert new_alert_row.last_seen_at == seq1_last
+    assert new_alert_row.lat is None
+    assert new_alert_row.lon is None
+
+
+@pytest.mark.asyncio
+async def test_delete_sequence_cleans_alerts_and_detections(async_client: AsyncClient, detection_session: AsyncSession):
+    camera = await detection_session.get(Camera, 1)
+    assert camera is not None
+    now = datetime.utcnow()
+    seq = Sequence(
+        camera_id=camera.id,
+        pose_id=None,
+        camera_azimuth=45.0,
+        sequence_azimuth=40.0,
+        cone_angle=10.0,
+        is_wildfire=None,
+        started_at=now,
+        last_seen_at=now,
+    )
+    detection = Detection(
+        camera_id=camera.id,
+        pose_id=None,
+        sequence_id=None,
+        azimuth=45.0,
+        bucket_key="tmp",
+        bboxes="[(0.1,0.1,0.2,0.2,0.9)]",
+        created_at=now,
+    )
+    detection_session.add(seq)
+    detection_session.add(detection)
+    await detection_session.commit()
+    await detection_session.refresh(seq)
+    await detection_session.refresh(detection)
+
+    alert = Alert(
+        organization_id=camera.organization_id,
+        lat=None,
+        lon=None,
+        started_at=now,
+        last_seen_at=now,
+    )
+    detection_session.add(alert)
+    await detection_session.commit()
+    await detection_session.refresh(alert)
+    detection_session.add(AlertSequence(alert_id=alert.id, sequence_id=seq.id))
+    await detection_session.commit()
+
+    # Link detection to sequence
+    detection.sequence_id = seq.id
+    detection_session.add(detection)
+    await detection_session.commit()
+
+    auth = pytest.get_token(
+        pytest.user_table[0]["id"], pytest.user_table[0]["role"].split(), pytest.user_table[0]["organization_id"]
+    )
+    resp = await async_client.delete(f"/sequences/{seq.id}", headers=auth)
+    assert resp.status_code == 200, resp.text
+
+    # Alert and mapping should be gone
+    mappings_res = await detection_session.exec(select(AlertSequence))
+    assert mappings_res.all() == []
+    alerts_res = await detection_session.exec(select(Alert))
+    assert alerts_res.all() == []
+
+    # Detection should have sequence_id cleared
+    det_res = await detection_session.exec(
+        select(Detection).where(Detection.id == detection.id).execution_options(populate_existing=True)
+    )
+    det = det_res.one()
+    assert det.sequence_id is None
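Both `test_attach_sequence_to_alert_creates_alert` and `test_sequence_label_updates_alerts` assert the same invariant: an alert's time window spans the union of its sequences' windows. Stated as a standalone helper (hypothetical, for clarity only, assuming at least one sequence per alert):

```python
from datetime import datetime
from typing import Iterable, Tuple


def alert_window(windows: Iterable[Tuple[datetime, datetime]]) -> Tuple[datetime, datetime]:
    """Fold sequence (started_at, last_seen_at) pairs into an alert's window."""
    starts, ends = zip(*windows)  # raises on an empty iterable, by design
    return min(starts), max(ends)
```

So `alert_window([(s1, e1), (s2, e2)]) == (min(s1, s2), max(e1, e2))`, which is exactly what the tests check after attaching or relabelling sequences.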
diff --git a/src/tests/services/test_overlap.py b/src/tests/services/test_overlap.py
new file mode 100644
index 00000000..5b216c5e
--- /dev/null
+++ b/src/tests/services/test_overlap.py
@@ -0,0 +1,53 @@
+# Copyright (C) 2025-2026, Pyronear.
+#
+# This program is licensed under the Apache License 2.0.
+# See LICENSE or go to <https://opensource.org/licenses/Apache-2.0> for full license details.
+
+from datetime import datetime, timedelta
+
+import pandas as pd
+
+from app.services.overlap import compute_overlap
+
+
+def _make_sequence(
+    id_: int,
+    lat: float,
+    lon: float,
+    sequence_azimuth: float,
+    cone_angle: float,
+    started_at: datetime,
+    last_seen_at: datetime,
+    is_wildfire=None,
+):
+    return {
+        "id": id_,
+        "lat": lat,
+        "lon": lon,
+        "sequence_azimuth": sequence_azimuth,
+        "cone_angle": cone_angle,
+        "is_wildfire": is_wildfire,
+        "started_at": started_at,
+        "last_seen_at": last_seen_at,
+    }
+
+
+def test_compute_overlap_groups_and_locations() -> None:
+    now = datetime.utcnow()
+    seqs = [
+        _make_sequence(1, 48.3792, 2.8208, 276.5, 3.0, now - timedelta(seconds=9), now - timedelta(seconds=1)),
+        _make_sequence(2, 48.2605, 2.7064, 8.3, 0.8, now - timedelta(seconds=8), now - timedelta(seconds=2)),
+        _make_sequence(3, 48.4267, 2.7109, 163.4, 1.0, now - timedelta(seconds=7), now - timedelta(seconds=3)),
+        _make_sequence(4, 10.0, 10.0, 90.0, 1.0, now - timedelta(seconds=6), now - timedelta(seconds=4)),
+    ]
+    df = compute_overlap(pd.DataFrame.from_records(seqs))
+
+    row1 = df[df["id"] == 1].iloc[0]
+    row4 = df[df["id"] == 4].iloc[0]
+
+    assert row1["event_groups"] == [(1, 2, 3)]
+    assert row1["event_smoke_locations"][0] is not None
+
+    # Non-overlapping singleton keeps its own group and no location
+    assert row4["event_groups"] == [(4,)]
+    assert row4["event_smoke_locations"] == [None]
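The smoke locations asserted in this new test come from intersecting camera lines of sight. Since `compute_overlap`'s internals are not part of this diff, the following is only a sketch of one plausible method, intersecting two bearings on a local flat-earth plane; the helper name and approach are assumptions for illustration:

```python
import math
from typing import Optional, Tuple


def intersect_bearings(
    lat1: float, lon1: float, az1: float, lat2: float, lon2: float, az2: float
) -> Optional[Tuple[float, float]]:
    """Intersect two lines of sight (azimuths in degrees, clockwise from north).

    Uses a flat-earth approximation around camera 1; returns None when the
    bearings are (near-)parallel or the crossing lies behind either camera.
    """
    k_lat = 111.32  # km per degree of latitude
    k_lon = 111.32 * math.cos(math.radians(lat1))  # km per degree of longitude here
    # Camera 2's position in km, relative to camera 1 at the origin.
    x2, y2 = (lon2 - lon1) * k_lon, (lat2 - lat1) * k_lat
    # Unit direction vectors as (east, north) components.
    d1 = (math.sin(math.radians(az1)), math.cos(math.radians(az1)))
    d2 = (math.sin(math.radians(az2)), math.cos(math.radians(az2)))
    denom = d1[0] * d2[1] - d2[0] * d1[1]
    if abs(denom) < 1e-9:
        return None
    # Solve t1 * d1 = (x2, y2) + t2 * d2 for the ranges along each bearing.
    t1 = (x2 * d2[1] - d2[0] * y2) / denom  # km from camera 1
    t2 = (x2 * d1[1] - d1[0] * y2) / denom  # km from camera 2
    if t1 <= 0 or t2 <= 0:
        return None
    return lat1 + t1 * d1[1] / k_lat, lon1 + t1 * d1[0] / k_lon
```

With the three closely spaced cameras in the test above, the pairwise crossings land near one another, which is why sequences 1, 2 and 3 form a single group with a shared location, while the distant sequence 4 stays a singleton with none.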