From 6f7afb3e466bd9a2a08075d95a9778220b72aa5d Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 8 Oct 2024 14:20:54 -0400
Subject: [PATCH 01/18] feat: Add Nix flake and custom overlays (WIP)
This commit introduces a Nix flake configuration and custom overlays for
pyobjc-core and pystray packages. The changes include:
- Add flake.nix with configuration for aarch64-darwin system
- Create flake.lock file with pinned dependencies
- Add custom overlay for pyobjc-core in overlays/pyobjc-core/
- Include build.nix and default.nix for pyobjc-core
- Add custom overlay for pystray in overlays/pystray/
- Include default.nix for pystray
NOTE: The Nix flake is currently not working due to a missing dependency.
The `pyobjc-framework-quartz` package is not yet packaged, which prevents
the successful building of the development environment.
Error message:
pyobjc-framework-quartz not installed
These changes are a work in progress towards allowing reproducible builds
and development environments for the project, specifically targeting
macOS ARM64 architecture.
The custom overlays address compatibility issues and provide necessary
modifications for building pyobjc-core and pystray on the target system.
Further work is needed to package the missing dependency or find an
alternative solution.
---
flake.lock | 144 +++++++++++++++++++++++++++++++
flake.nix | 77 +++++++++++++++++
overlays/pyobjc-core/build.nix | 9 ++
overlays/pyobjc-core/default.nix | 124 ++++++++++++++++++++++++++
overlays/pystray/default.nix | 78 +++++++++++++++++
5 files changed, 432 insertions(+)
create mode 100644 flake.lock
create mode 100644 flake.nix
create mode 100644 overlays/pyobjc-core/build.nix
create mode 100644 overlays/pyobjc-core/default.nix
create mode 100644 overlays/pystray/default.nix
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 0000000..a5eb791
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,144 @@
+{
+ "nodes": {
+ "flake-utils": {
+ "inputs": {
+ "systems": "systems"
+ },
+ "locked": {
+ "lastModified": 1726560853,
+ "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
+ "type": "github"
+ },
+ "original": {
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "type": "github"
+ }
+ },
+ "nix-github-actions": {
+ "inputs": {
+ "nixpkgs": [
+ "poetry2nix",
+ "nixpkgs"
+ ]
+ },
+ "locked": {
+ "lastModified": 1720066371,
+ "narHash": "sha256-uPlLYH2S0ACj0IcgaK9Lsf4spmJoGejR9DotXiXSBZQ=",
+ "owner": "nix-community",
+ "repo": "nix-github-actions",
+ "rev": "622f829f5fe69310a866c8a6cd07e747c44ef820",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-community",
+ "repo": "nix-github-actions",
+ "type": "github"
+ }
+ },
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1728241625,
+ "narHash": "sha256-yumd4fBc/hi8a9QgA9IT8vlQuLZ2oqhkJXHPKxH/tRw=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "c31898adf5a8ed202ce5bea9f347b1c6871f32d1",
+ "type": "github"
+ },
+ "original": {
+ "owner": "NixOS",
+ "ref": "nixos-unstable",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "poetry2nix": {
+ "inputs": {
+ "flake-utils": [
+ "flake-utils"
+ ],
+ "nix-github-actions": "nix-github-actions",
+ "nixpkgs": [
+ "nixpkgs"
+ ],
+ "systems": "systems_2",
+ "treefmt-nix": "treefmt-nix"
+ },
+ "locked": {
+ "lastModified": 1728266256,
+ "narHash": "sha256-RefXB9kqYch6uGT+mo6m3KTbNerfbDYz+EqkLb6YBbs=",
+ "owner": "nix-community",
+ "repo": "poetry2nix",
+ "rev": "8e965fd42c0da4357c53d987bc62b54a954424da",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-community",
+ "repo": "poetry2nix",
+ "type": "github"
+ }
+ },
+ "root": {
+ "inputs": {
+ "flake-utils": "flake-utils",
+ "nixpkgs": "nixpkgs",
+ "poetry2nix": "poetry2nix"
+ }
+ },
+ "systems": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-systems",
+ "repo": "default",
+ "type": "github"
+ }
+ },
+ "systems_2": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "id": "systems",
+ "type": "indirect"
+ }
+ },
+ "treefmt-nix": {
+ "inputs": {
+ "nixpkgs": [
+ "poetry2nix",
+ "nixpkgs"
+ ]
+ },
+ "locked": {
+ "lastModified": 1727984844,
+ "narHash": "sha256-xpRqITAoD8rHlXQafYZOLvUXCF6cnZkPfoq67ThN0Hc=",
+ "owner": "numtide",
+ "repo": "treefmt-nix",
+ "rev": "4446c7a6fc0775df028c5a3f6727945ba8400e64",
+ "type": "github"
+ },
+ "original": {
+ "owner": "numtide",
+ "repo": "treefmt-nix",
+ "type": "github"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 0000000..55b536e
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,77 @@
+{
+ description = "topos";
+
+ inputs = {
+ flake-utils.url = "github:numtide/flake-utils";
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
+ poetry2nix = {
+ url = "github:nix-community/poetry2nix";
+ inputs.nixpkgs.follows = "nixpkgs";
+ inputs.flake-utils.follows = "flake-utils";
+ };
+ };
+
+ outputs = { self, nixpkgs, flake-utils, poetry2nix }:
+ flake-utils.lib.eachSystem ["aarch64-darwin"] (system:
+ let
+ pkgs = import nixpkgs {
+ inherit system;
+ overlays = [
+ poetry2nix.overlays.default
+ (final: prev: {
+ myapp = final.callPackage myapp { };
+ pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
+ (python-final: python-prev: {
+ pyobjc-core = python-final.callPackage ./overlays/pyobjc-core/default.nix { };
+ pystray = python-final.callPackage ./overlays/pystray/default.nix {
+ inherit (python-final) pyobjc-core;
+ };
+ })
+ ];
+ })
+ ];
+ };
+
+ # see https://github.com/nix-community/poetry2nix/tree/master#api for more functions and examples.
+ myapp = { poetry2nix, lib }: poetry2nix.mkPoetryApplication {
+ projectDir = self;
+ preferWheels = true;
+ overrides = poetry2nix.overrides.withDefaults (final: super:
+ lib.mapAttrs
+ (attr: systems: super.${attr}.overridePythonAttrs
+ (old: {
+ nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: final.${a}) systems;
+ }))
+ {
+ # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
+ # package = [ "setuptools" ];
+ }
+ );
+ };
+
+ in
+ {
+ packages.default = pkgs.myapp;
+ devShells = {
+ # Shell for app dependencies.
+ #
+ # nix develop
+ #
+ # Use this shell for developing your app.
+ default = pkgs.mkShell {
+ inputsFrom = [ pkgs.myapp ];
+ };
+
+ # Shell for poetry.
+ #
+ # nix develop .#poetry
+ #
+ # Use this shell for changes to pyproject.toml and poetry.lock.
+ poetry = pkgs.mkShell {
+ packages = [ pkgs.poetry ];
+ };
+ };
+ legacyPackages = pkgs;
+ }
+ );
+}
diff --git a/overlays/pyobjc-core/build.nix b/overlays/pyobjc-core/build.nix
new file mode 100644
index 0000000..b37a62a
--- /dev/null
+++ b/overlays/pyobjc-core/build.nix
@@ -0,0 +1,9 @@
+{ pkgs ? import {} }:
+
+let
+ pythonPackages = pkgs.python3Packages;
+in
+ pythonPackages.callPackage ./default.nix {
+ inherit (pythonPackages) buildPythonPackage setuptools;
+ inherit (pkgs) lib stdenv fetchPypi xcodebuild cctools darwin;
+ }
diff --git a/overlays/pyobjc-core/default.nix b/overlays/pyobjc-core/default.nix
new file mode 100644
index 0000000..ed351ab
--- /dev/null
+++ b/overlays/pyobjc-core/default.nix
@@ -0,0 +1,124 @@
+# umerged pr https://github.com/NixOS/nixpkgs/pull/336801
+{
+ lib,
+ pkgs,
+ stdenv,
+ buildPythonPackage,
+ setuptools,
+ fetchPypi,
+ xcodebuild,
+ cctools,
+ darwin,
+}:
+let
+ appleSDK = darwin.apple_sdk_11_0;
+
+ apple_libffi = stdenv.mkDerivation {
+ pname = "apple-libffi";
+ inherit (appleSDK.MacOSX-SDK) version;
+ dontUnpack = true;
+ installPhase = ''
+ mkdir -p $out/include $out/lib
+ cp -r ${appleSDK.MacOSX-SDK}/usr/include/ffi $out/include/
+ cp -r ${appleSDK.MacOSX-SDK}/usr/lib/libffi.* $out/lib/
+ '';
+ };
+in
+
+buildPythonPackage rec {
+ pname = "pyobjc-core";
+ version = "10.3.1";
+ src = fetchPypi {
+ pname = "pyobjc_core";
+ inherit version;
+ hash = "sha256-sgSoDMwHD5qz+K9COjolpv14fiKFCNAMTDD4rFOLpyA=";
+ };
+ pyproject = true;
+ build-system = [ setuptools ];
+
+ nativeBuildInputs = [
+ xcodebuild
+ cctools
+ ];
+
+ buildInputs = [
+ appleSDK.objc4
+ appleSDK.frameworks.Foundation
+ appleSDK.frameworks.GameplayKit
+ appleSDK.frameworks.MetalPerformanceShaders
+ apple_libffi
+ ];
+
+ checkPhase = ''
+ # TODO: This library does not follow standard testing with pytest
+ # and implemented its own test runner bootstrapping unittest
+ python3 setup.py test
+ '';
+
+ hardeningDisable = [ "strictoverflow" ]; # -fno-strict-overflow is not supported in clang on darwin
+ env.NIX_CFLAGS_COMPILE = toString [ "-Wno-error=deprecated-declarations" ];
+ postPatch = ''
+ # TODO: Make patch for setup.py
+ # ignore the manual include flag for ffi, appears that it needs a very specific ffi from sdk (needs confirmation)
+ substituteInPlace setup.py --replace-fail '"-I/usr/include/ffi"' '#"-I/usr/include/ffi"'
+ # make os.path.exists that can spoil objc4 return True
+ substituteInPlace setup.py --replace-fail 'os.path.join(self.sdk_root, "usr/include/objc/runtime.h")' '"/"'
+
+ # Turn off clang’s Link Time Optimization, or else we can’t recognize (and link) Objective C .o’s:
+ sed -r 's/"-flto=[^"]+",//g' -i setup.py
+ # Fix some test code:
+ grep -RF '"sw_vers"' | cut -d: -f1 | while IFS= read -r file ; do
+ sed -r "s+"sw_vers"+"/usr/bin/sw_vers"+g" -i "$file"
+ done
+
+ # Disables broken tests and fixes some of them
+ # TODO: make a patch for tests
+ substituteInPlace \
+ PyObjCTest/test_nsdecimal.py \
+ --replace-fail "Cannot compare NSDecimal and decimal.Decimal" "Cannot compare NSDecimal and (\\\\w+.)?Decimal"
+ substituteInPlace \
+ PyObjCTest/test_bundleFunctions.py \
+ --replace-fail "os.path.expanduser(\"~\")" "\"/var/empty\""
+ substituteInPlace \
+ PyObjCTest/test_methodaccess.py \
+ --replace-fail "testClassThroughInstance2" \
+ "disable_testClassThroughInstance2"
+
+
+ # Fixes impurities in the package and fixes darwin min version
+ # TODO: Propose a patch that fixes it in a better way
+ # Force it to target our ‘darwinMinVersion’, it’s not recognized correctly:
+ grep -RF -- '-DPyObjC_BUILD_RELEASE=%02d%02d' | cut -d: -f1 | while IFS= read -r file ; do
+ sed -r '/-DPyObjC_BUILD_RELEASE=%02d%02d/{s/%02d%02d/${
+ lib.concatMapStrings (lib.fixedWidthString 2 "0") (
+ lib.splitString "." stdenv.targetPlatform.darwinMinVersion
+ )
+ }/;n;d;}' -i "$file"
+ done
+ # impurities:
+ ( grep -RF '/usr/bin/xcrun' || true ; ) | cut -d: -f1 | while IFS= read -r file ; do
+ sed -r "s+/usr/bin/xcrun+$(which xcrun)+g" -i "$file"
+ done
+ ( grep -RF '/usr/bin/python' || true ; ) | cut -d: -f1 | while IFS= read -r file ; do
+ sed -r "s+/usr/bin/python+$(which python)+g" -i "$file"
+ done
+
+ # Adjust expected paths for libcrypto
+ substituteInPlace PyObjCTest/test_dyld.py \
+ --replace '/usr/lib/libcrypto.dylib' '${pkgs.openssl.out}/lib/libcrypto.dylib' \
+ --replace '/Library/Frameworks/Python.framework/Versions/3.12/lib/libcrypto.dylib' '${pkgs.openssl.out}/lib/libcrypto.dylib'
+
+ # Disable the failing test_dyld_framework test
+ substituteInPlace PyObjCTest/test_dyld.py \
+ --replace 'def test_dyld_framework' 'def disabled_test_dyld_framework'
+ '';
+ passthru = {
+ inherit apple_libffi;
+ };
+ meta = {
+ description = "The Python <-> Objective-C Bridge with bindings for macOS frameworks";
+ homepage = "https://pypi.org/project/pyobjc-core/";
+ platforms = lib.platforms.darwin;
+ maintainers = [ lib.maintainers.ferrine ];
+ };
+}
diff --git a/overlays/pystray/default.nix b/overlays/pystray/default.nix
new file mode 100644
index 0000000..455c475
--- /dev/null
+++ b/overlays/pystray/default.nix
@@ -0,0 +1,78 @@
+# default.nix
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, fetchpatch
+, pillow
+, xlib
+, six
+, xvfb-run
+, setuptools
+, gobject-introspection
+, pygobject3
+, gtk3
+, libayatana-appindicator
+, stdenv
+, pyobjc-core
+}:
+
+buildPythonPackage rec {
+ pname = "pystray";
+ version = "0.19.5";
+ pyproject = true;
+
+ src = fetchFromGitHub {
+ owner = "moses-palmer";
+ repo = "pystray";
+ rev = "v${version}";
+ hash = "sha256-CZhbaXwKFrRBEomzfFPMQdMkTOl5lbgI64etfDRiRu4=";
+ };
+
+ patches = [
+ (fetchpatch {
+ url = "https://github.com/moses-palmer/pystray/commit/813007e3034d950d93a2f3e5b029611c3c9c98ad.patch";
+ hash = "sha256-m2LfZcWXSfgxb73dac21VDdMDVz3evzcCz5QjdnfM1U=";
+ })
+ ];
+
+ postPatch = ''
+ substituteInPlace setup.py \
+ --replace-fail "'sphinx >=1.3.1'" ""
+ '';
+
+ nativeBuildInputs = [
+ gobject-introspection
+ setuptools
+ ];
+
+ propagatedBuildInputs = [
+ pillow
+ six
+ # pygobject3
+ gtk3
+ ] ++ lib.optionals stdenv.isDarwin [
+ pyobjc-core
+ ] ++ lib.optionals stdenv.isLinux [
+ xlib
+ libayatana-appindicator
+ ];
+
+ nativeCheckInputs = lib.optionals stdenv.isLinux [ xvfb-run ];
+
+ checkPhase = lib.optionalString stdenv.isLinux ''
+ rm tests/icon_tests.py # test needs user input
+
+ xvfb-run -s '-screen 0 800x600x24' python setup.py test
+ '';
+
+ meta = with lib; {
+ homepage = "https://github.com/moses-palmer/pystray";
+ description = "This library allows you to create a system tray icon";
+ license = with licenses; [
+ gpl3Plus
+ lgpl3Plus
+ ];
+ platforms = platforms.unix;
+ maintainers = with maintainers; [ jojosch ];
+ };
+}
From 73cf1bc8acbd02c5e2dc21e16c236d5693f3299b Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 8 Oct 2024 14:26:05 -0400
Subject: [PATCH 02/18] fix: Update Nix flake to build Python packages
successfully
This commit resolves the issue with building Python packages in the Nix flake.
- Modified flake.nix to remove pyobjc-core custom package
- Updated overlays/pystray/default.nix to fetch and use the wheel from PyPI
- Renamed overlays/pyobjc-core/build.nix to overlays/pystray/build.nix and updated it for building the pystray package
The primary fix was to install pystray from the pre-built wheel on PyPI
instead of building it from source. This approach bypasses the need for
the missing pyobjc-framework-quartz dependency and allows the flake to
build successfully.
These changes enable the Nix flake to correctly build the required Python
packages, resolving the previous error and allowing the development
environment to be created as intended.
---
flake.nix | 5 +-
overlays/pyobjc-core/default.nix | 124 --------------------
overlays/{pyobjc-core => pystray}/build.nix | 4 +-
overlays/pystray/default.nix | 88 ++++----------
4 files changed, 25 insertions(+), 196 deletions(-)
delete mode 100644 overlays/pyobjc-core/default.nix
rename overlays/{pyobjc-core => pystray}/build.nix (50%)
diff --git a/flake.nix b/flake.nix
index 55b536e..8a17593 100644
--- a/flake.nix
+++ b/flake.nix
@@ -22,10 +22,7 @@
myapp = final.callPackage myapp { };
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(python-final: python-prev: {
- pyobjc-core = python-final.callPackage ./overlays/pyobjc-core/default.nix { };
- pystray = python-final.callPackage ./overlays/pystray/default.nix {
- inherit (python-final) pyobjc-core;
- };
+ pystray = python-final.callPackage ./overlays/pystray/default.nix { };
})
];
})
diff --git a/overlays/pyobjc-core/default.nix b/overlays/pyobjc-core/default.nix
deleted file mode 100644
index ed351ab..0000000
--- a/overlays/pyobjc-core/default.nix
+++ /dev/null
@@ -1,124 +0,0 @@
-# umerged pr https://github.com/NixOS/nixpkgs/pull/336801
-{
- lib,
- pkgs,
- stdenv,
- buildPythonPackage,
- setuptools,
- fetchPypi,
- xcodebuild,
- cctools,
- darwin,
-}:
-let
- appleSDK = darwin.apple_sdk_11_0;
-
- apple_libffi = stdenv.mkDerivation {
- pname = "apple-libffi";
- inherit (appleSDK.MacOSX-SDK) version;
- dontUnpack = true;
- installPhase = ''
- mkdir -p $out/include $out/lib
- cp -r ${appleSDK.MacOSX-SDK}/usr/include/ffi $out/include/
- cp -r ${appleSDK.MacOSX-SDK}/usr/lib/libffi.* $out/lib/
- '';
- };
-in
-
-buildPythonPackage rec {
- pname = "pyobjc-core";
- version = "10.3.1";
- src = fetchPypi {
- pname = "pyobjc_core";
- inherit version;
- hash = "sha256-sgSoDMwHD5qz+K9COjolpv14fiKFCNAMTDD4rFOLpyA=";
- };
- pyproject = true;
- build-system = [ setuptools ];
-
- nativeBuildInputs = [
- xcodebuild
- cctools
- ];
-
- buildInputs = [
- appleSDK.objc4
- appleSDK.frameworks.Foundation
- appleSDK.frameworks.GameplayKit
- appleSDK.frameworks.MetalPerformanceShaders
- apple_libffi
- ];
-
- checkPhase = ''
- # TODO: This library does not follow standard testing with pytest
- # and implemented its own test runner bootstrapping unittest
- python3 setup.py test
- '';
-
- hardeningDisable = [ "strictoverflow" ]; # -fno-strict-overflow is not supported in clang on darwin
- env.NIX_CFLAGS_COMPILE = toString [ "-Wno-error=deprecated-declarations" ];
- postPatch = ''
- # TODO: Make patch for setup.py
- # ignore the manual include flag for ffi, appears that it needs a very specific ffi from sdk (needs confirmation)
- substituteInPlace setup.py --replace-fail '"-I/usr/include/ffi"' '#"-I/usr/include/ffi"'
- # make os.path.exists that can spoil objc4 return True
- substituteInPlace setup.py --replace-fail 'os.path.join(self.sdk_root, "usr/include/objc/runtime.h")' '"/"'
-
- # Turn off clang’s Link Time Optimization, or else we can’t recognize (and link) Objective C .o’s:
- sed -r 's/"-flto=[^"]+",//g' -i setup.py
- # Fix some test code:
- grep -RF '"sw_vers"' | cut -d: -f1 | while IFS= read -r file ; do
- sed -r "s+"sw_vers"+"/usr/bin/sw_vers"+g" -i "$file"
- done
-
- # Disables broken tests and fixes some of them
- # TODO: make a patch for tests
- substituteInPlace \
- PyObjCTest/test_nsdecimal.py \
- --replace-fail "Cannot compare NSDecimal and decimal.Decimal" "Cannot compare NSDecimal and (\\\\w+.)?Decimal"
- substituteInPlace \
- PyObjCTest/test_bundleFunctions.py \
- --replace-fail "os.path.expanduser(\"~\")" "\"/var/empty\""
- substituteInPlace \
- PyObjCTest/test_methodaccess.py \
- --replace-fail "testClassThroughInstance2" \
- "disable_testClassThroughInstance2"
-
-
- # Fixes impurities in the package and fixes darwin min version
- # TODO: Propose a patch that fixes it in a better way
- # Force it to target our ‘darwinMinVersion’, it’s not recognized correctly:
- grep -RF -- '-DPyObjC_BUILD_RELEASE=%02d%02d' | cut -d: -f1 | while IFS= read -r file ; do
- sed -r '/-DPyObjC_BUILD_RELEASE=%02d%02d/{s/%02d%02d/${
- lib.concatMapStrings (lib.fixedWidthString 2 "0") (
- lib.splitString "." stdenv.targetPlatform.darwinMinVersion
- )
- }/;n;d;}' -i "$file"
- done
- # impurities:
- ( grep -RF '/usr/bin/xcrun' || true ; ) | cut -d: -f1 | while IFS= read -r file ; do
- sed -r "s+/usr/bin/xcrun+$(which xcrun)+g" -i "$file"
- done
- ( grep -RF '/usr/bin/python' || true ; ) | cut -d: -f1 | while IFS= read -r file ; do
- sed -r "s+/usr/bin/python+$(which python)+g" -i "$file"
- done
-
- # Adjust expected paths for libcrypto
- substituteInPlace PyObjCTest/test_dyld.py \
- --replace '/usr/lib/libcrypto.dylib' '${pkgs.openssl.out}/lib/libcrypto.dylib' \
- --replace '/Library/Frameworks/Python.framework/Versions/3.12/lib/libcrypto.dylib' '${pkgs.openssl.out}/lib/libcrypto.dylib'
-
- # Disable the failing test_dyld_framework test
- substituteInPlace PyObjCTest/test_dyld.py \
- --replace 'def test_dyld_framework' 'def disabled_test_dyld_framework'
- '';
- passthru = {
- inherit apple_libffi;
- };
- meta = {
- description = "The Python <-> Objective-C Bridge with bindings for macOS frameworks";
- homepage = "https://pypi.org/project/pyobjc-core/";
- platforms = lib.platforms.darwin;
- maintainers = [ lib.maintainers.ferrine ];
- };
-}
diff --git a/overlays/pyobjc-core/build.nix b/overlays/pystray/build.nix
similarity index 50%
rename from overlays/pyobjc-core/build.nix
rename to overlays/pystray/build.nix
index b37a62a..4c798fe 100644
--- a/overlays/pyobjc-core/build.nix
+++ b/overlays/pystray/build.nix
@@ -4,6 +4,6 @@ let
pythonPackages = pkgs.python3Packages;
in
pythonPackages.callPackage ./default.nix {
- inherit (pythonPackages) buildPythonPackage setuptools;
- inherit (pkgs) lib stdenv fetchPypi xcodebuild cctools darwin;
+ inherit (pythonPackages) buildPythonPackage pythonOlder fetchPypi flit-core pytestCheckHook;
+ inherit (pkgs) lib;
}
diff --git a/overlays/pystray/default.nix b/overlays/pystray/default.nix
index 455c475..c6f4958 100644
--- a/overlays/pystray/default.nix
+++ b/overlays/pystray/default.nix
@@ -1,78 +1,34 @@
-# default.nix
-{ lib
-, buildPythonPackage
-, fetchFromGitHub
-, fetchpatch
-, pillow
-, xlib
-, six
-, xvfb-run
-, setuptools
-, gobject-introspection
-, pygobject3
-, gtk3
-, libayatana-appindicator
-, stdenv
-, pyobjc-core
+{
+ lib,
+ buildPythonPackage,
+ pythonOlder,
+ fetchPypi,
+ flit-core,
+ pytestCheckHook,
}:
buildPythonPackage rec {
pname = "pystray";
version = "0.19.5";
- pyproject = true;
- src = fetchFromGitHub {
- owner = "moses-palmer";
- repo = "pystray";
- rev = "v${version}";
- hash = "sha256-CZhbaXwKFrRBEomzfFPMQdMkTOl5lbgI64etfDRiRu4=";
- };
-
- patches = [
- (fetchpatch {
- url = "https://github.com/moses-palmer/pystray/commit/813007e3034d950d93a2f3e5b029611c3c9c98ad.patch";
- hash = "sha256-m2LfZcWXSfgxb73dac21VDdMDVz3evzcCz5QjdnfM1U=";
- })
- ];
-
- postPatch = ''
- substituteInPlace setup.py \
- --replace-fail "'sphinx >=1.3.1'" ""
- '';
-
- nativeBuildInputs = [
- gobject-introspection
- setuptools
- ];
-
- propagatedBuildInputs = [
- pillow
- six
- # pygobject3
- gtk3
- ] ++ lib.optionals stdenv.isDarwin [
- pyobjc-core
- ] ++ lib.optionals stdenv.isLinux [
- xlib
- libayatana-appindicator
- ];
-
- nativeCheckInputs = lib.optionals stdenv.isLinux [ xvfb-run ];
+ disabled = pythonOlder "3.7";
- checkPhase = lib.optionalString stdenv.isLinux ''
- rm tests/icon_tests.py # test needs user input
+ format = "wheel";
- xvfb-run -s '-screen 0 800x600x24' python setup.py test
- '';
+ src = fetchPypi {
+ inherit pname;
+ inherit version;
+ inherit format;
+ dist="py2.py3";
+ python="py2.py3";
+ sha256 = "a0c2229d02cf87207297c22d86ffc57c86c227517b038c0d3c59df79295ac617";
+ };
meta = with lib; {
- homepage = "https://github.com/moses-palmer/pystray";
- description = "This library allows you to create a system tray icon";
- license = with licenses; [
- gpl3Plus
- lgpl3Plus
- ];
- platforms = platforms.unix;
- maintainers = with maintainers; [ jojosch ];
+    changelog = "https://github.com/moses-palmer/pystray/releases/tag/v${version}";
+    description = "This library allows you to create a system tray icon";
+    license = with licenses; [ gpl3Plus lgpl3Plus ];
+    homepage = "https://github.com/moses-palmer/pystray";
+    maintainers = with maintainers; [ jojosch ];
};
}
From ce2ce9ac8157e6c5a9e941ae5c37b4af0f09b2fd Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 8 Oct 2024 14:36:07 -0400
Subject: [PATCH 03/18] feat: Expose 'topos' in dev shell
Add shellHook to make the 'topos' command available in the default
development shell by adding the app's bin directory to PATH.
---
flake.nix | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/flake.nix b/flake.nix
index 8a17593..75cb751 100644
--- a/flake.nix
+++ b/flake.nix
@@ -57,6 +57,10 @@
# Use this shell for developing your app.
default = pkgs.mkShell {
inputsFrom = [ pkgs.myapp ];
+
+ shellHook = ''
+ export PATH="${pkgs.myapp}/bin:$PATH"
+ '';
};
# Shell for poetry.
From 5178413543e600b8deed7d968969e46e8e5fed1d Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 8 Oct 2024 15:10:32 -0400
Subject: [PATCH 04/18] fix: Resolve spaCy model loading issue
This commit addresses the problem where spaCy couldn't find the 'en_core_web_sm' model.
Changes include:
- Remove 'active_spacy_model' from config.yaml
- Add 'en_core_web_sm' as a direct dependency in pyproject.toml
- Update token_classifiers.py to import and load the model directly
Note: This fix may generate a warning about potential version incompatibility
between the installed model (3.8.0) and the current spaCy version (3.7.2).
Future updates should align these versions to ensure full compatibility.
---
flake.nix | 4 +-
overlays/tkinter/default.nix | 36 +
poetry.lock | 892 ++----------------
pyproject.toml | 2 +-
topos/api/api_routes.py | 73 +-
topos/api/p2p_chat_routes.py | 22 +-
.../basic_analytics/token_classifiers.py | 19 +-
7 files changed, 204 insertions(+), 844 deletions(-)
create mode 100644 overlays/tkinter/default.nix
diff --git a/flake.nix b/flake.nix
index 75cb751..90b3639 100644
--- a/flake.nix
+++ b/flake.nix
@@ -41,7 +41,7 @@
}))
{
# https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
- # package = [ "setuptools" ];
+ package = [ "setuptools" ];
}
);
};
@@ -56,7 +56,7 @@
#
# Use this shell for developing your app.
default = pkgs.mkShell {
- inputsFrom = [ pkgs.myapp ];
+ inputsFrom = [ pkgs.myapp ];
shellHook = ''
export PATH="${pkgs.myapp}/bin:$PATH"
diff --git a/overlays/tkinter/default.nix b/overlays/tkinter/default.nix
new file mode 100644
index 0000000..a473a2f
--- /dev/null
+++ b/overlays/tkinter/default.nix
@@ -0,0 +1,36 @@
+{ lib
+, stdenv
+, buildPythonPackage
+, python
+, isPyPy
+}:
+
+buildPythonPackage {
+ pname = "tkinter";
+ version = python.version;
+ format = "other";
+
+ disabled = isPyPy;
+
+ installPhase =
+ ''
+ # Move the tkinter module
+ mkdir -p $out/${python.sitePackages}
+ mv lib/${python.libPrefix}/lib-dynload/_tkinter* $out/${python.sitePackages}/
+ ''
+ + lib.optionalString (!stdenv.isDarwin) ''
+ # Update the rpath to point to python without x11Support
+ old_rpath=$(patchelf --print-rpath $out/${python.sitePackages}/_tkinter*)
+ new_rpath=$(sed "s#${python}#${python}#g" <<< "$old_rpath" )
+ patchelf --set-rpath $new_rpath $out/${python.sitePackages}/_tkinter*
+ '';
+
+ meta = python.meta // {
+ description = "The standard Python interface to the Tcl/Tk GUI toolkit";
+ longDescription = ''
+ The tkinter package ("Tk interface") is the standard Python interface to
+ the Tcl/Tk GUI toolkit. Both Tk and tkinter are available on most Unix
+ platforms, including macOS, as well as on Windows systems.
+ '';
+ };
+}
diff --git a/poetry.lock b/poetry.lock
index 73a75ee..e7cb313 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -31,116 +31,6 @@ test-prod = ["parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "py
test-trackers = ["comet-ml", "dvclive", "tensorboard", "wandb"]
testing = ["bitsandbytes", "datasets", "diffusers", "evaluate", "parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "pytest-xdist", "scikit-learn", "scipy", "timm", "torchpippy (>=0.2.0)", "tqdm", "transformers"]
-[[package]]
-name = "aiohttp"
-version = "3.9.5"
-description = "Async http client/server framework (asyncio)"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
- {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
- {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
- {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
- {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
- {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
- {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
- {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
- {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
- {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
- {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
- {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
-]
-
-[package.dependencies]
-aiosignal = ">=1.1.2"
-async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
-attrs = ">=17.3.0"
-frozenlist = ">=1.1.1"
-multidict = ">=4.5,<7.0"
-yarl = ">=1.0,<2.0"
-
-[package.extras]
-speedups = ["Brotli", "aiodns", "brotlicffi"]
-
-[[package]]
-name = "aiosignal"
-version = "1.3.1"
-description = "aiosignal: a list of registered asynchronous callbacks"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
- {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
-]
-
-[package.dependencies]
-frozenlist = ">=1.1.0"
-
[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -174,36 +64,6 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
-[[package]]
-name = "async-timeout"
-version = "4.0.3"
-description = "Timeout context manager for asyncio programs"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
- {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
-]
-
-[[package]]
-name = "attrs"
-version = "23.2.0"
-description = "Classes Without Boilerplate"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
- {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
-]
-
-[package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
-
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
@@ -722,21 +582,6 @@ files = [
{file = "Cython-0.29.37.tar.gz", hash = "sha256:f813d4a6dd94adee5d4ff266191d1d95bf6d4164a4facc535422c021b2504cfb"},
]
-[[package]]
-name = "dataclasses-json"
-version = "0.6.7"
-description = "Easily serialize dataclasses to and from JSON."
-optional = false
-python-versions = "<4.0,>=3.7"
-files = [
- {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"},
- {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"},
-]
-
-[package.dependencies]
-marshmallow = ">=3.18.0,<4.0.0"
-typing-inspect = ">=0.4.0,<1"
-
[[package]]
name = "deprecation"
version = "2.1.0"
@@ -805,6 +650,20 @@ files = [
[package.extras]
dev = ["coverage", "coveralls", "pytest"]
+[[package]]
+name = "en_core_web_sm"
+version = "3.8.0"
+description = "English pipeline optimized for CPU. Components: tok2vec, tagger, parser, senter, ner, attribute_ruler, lemmatizer."
+optional = false
+python-versions = "*"
+files = [
+ {file = "en_core_web_sm-3.8.0-py3-none-any.whl", hash = "sha256:1932429db727d4bff3deed6b34cfc05df17794f4a52eeb26cf8928f7c1a0fb85"},
+]
+
+[package.source]
+type = "url"
+url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl"
+
[[package]]
name = "exceptiongroup"
version = "1.2.1"
@@ -919,92 +778,6 @@ ufo = ["fs (>=2.2.0,<3)"]
unicode = ["unicodedata2 (>=15.1.0)"]
woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
-[[package]]
-name = "frozenlist"
-version = "1.4.1"
-description = "A list-like structure which implements collections.abc.MutableSequence"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
- {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
- {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
- {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
- {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
- {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
- {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
- {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
- {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
- {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
- {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
- {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
- {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
- {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
- {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
- {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
- {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
- {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
- {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
- {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
- {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
- {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
- {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
- {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
- {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
- {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
- {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
- {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
- {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
- {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
- {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
- {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
- {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
- {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
- {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
- {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
- {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
- {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
- {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
- {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
- {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
- {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
- {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
- {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
- {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
- {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
- {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
- {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
- {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
- {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
- {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
- {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
- {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
- {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
- {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
- {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
- {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
- {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
- {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
- {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
- {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
- {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
- {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
- {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
- {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
- {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
- {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
- {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
- {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
- {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
- {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
- {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
- {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
- {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
- {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
- {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
- {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
-]
-
[[package]]
name = "fsspec"
version = "2024.6.0"
@@ -1073,77 +846,6 @@ files = [
httpx = {version = ">=0.24,<0.28", extras = ["http2"]}
pydantic = ">=1.10,<3"
-[[package]]
-name = "greenlet"
-version = "3.0.3"
-description = "Lightweight in-process concurrent programming"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
- {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
- {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
- {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
- {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
- {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
- {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
- {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
- {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
- {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
- {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
- {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
- {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
- {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
- {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"},
- {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"},
- {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"},
- {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"},
- {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"},
- {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"},
- {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"},
- {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"},
- {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"},
- {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"},
- {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"},
- {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"},
- {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"},
- {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"},
- {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"},
- {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"},
- {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"},
- {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"},
- {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
-]
-
-[package.extras]
-docs = ["Sphinx", "furo"]
-test = ["objgraph", "psutil"]
-
[[package]]
name = "h11"
version = "0.14.0"
@@ -1404,31 +1106,6 @@ files = [
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
]
-[[package]]
-name = "jsonpatch"
-version = "1.33"
-description = "Apply JSON-Patches (RFC 6902)"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
-files = [
- {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"},
- {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"},
-]
-
-[package.dependencies]
-jsonpointer = ">=1.9"
-
-[[package]]
-name = "jsonpointer"
-version = "3.0.0"
-description = "Identify specific nodes in a JSON document (RFC 6901)"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
- {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
-]
-
[[package]]
name = "kiwisolver"
version = "1.4.5"
@@ -1542,96 +1219,6 @@ files = [
{file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
]
-[[package]]
-name = "langchain"
-version = "0.1.4"
-description = "Building applications with LLMs through composability"
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "langchain-0.1.4-py3-none-any.whl", hash = "sha256:6befdd6221f5f326092e31a3c19efdc7ce3d7d1f2e2cab065141071451730ed7"},
- {file = "langchain-0.1.4.tar.gz", hash = "sha256:8767a9461e2b717ce9a35b1fa20659de89ea86ba9c2a4ff516e05d47ab2d195d"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.8.3,<4.0.0"
-async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
-dataclasses-json = ">=0.5.7,<0.7"
-jsonpatch = ">=1.33,<2.0"
-langchain-community = ">=0.0.14,<0.1"
-langchain-core = ">=0.1.16,<0.2"
-langsmith = ">=0.0.83,<0.1"
-numpy = ">=1,<2"
-pydantic = ">=1,<3"
-PyYAML = ">=5.3"
-requests = ">=2,<3"
-SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
-
-[package.extras]
-azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
-clarifai = ["clarifai (>=9.1.0)"]
-cli = ["typer (>=0.9.0,<0.10.0)"]
-cohere = ["cohere (>=4,<5)"]
-docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
-embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", 
"xmltodict (>=0.13.0,<0.14.0)"]
-javascript = ["esprima (>=4.0.1,<5.0.0)"]
-llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
-openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
-qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
-text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
-
-[[package]]
-name = "langchain-community"
-version = "0.0.18"
-description = "Community contributed LangChain integrations."
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "langchain_community-0.0.18-py3-none-any.whl", hash = "sha256:b87e20c1fa3f37e9608d7ccc08b4d8ed86f875b8c1e735d0464ae986e41c5a71"},
- {file = "langchain_community-0.0.18.tar.gz", hash = "sha256:f044f331b418f16148b76929f27cc2107fce2d190ea3fae0cdaf155ceda9892f"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.8.3,<4.0.0"
-dataclasses-json = ">=0.5.7,<0.7"
-langchain-core = ">=0.1.19,<0.2"
-langsmith = ">=0.0.83,<0.1"
-numpy = ">=1,<2"
-PyYAML = ">=5.3"
-requests = ">=2,<3"
-SQLAlchemy = ">=1.4,<3"
-tenacity = ">=8.1.0,<9.0.0"
-
-[package.extras]
-cli = ["typer (>=0.9.0,<0.10.0)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit 
(>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"]
-
-[[package]]
-name = "langchain-core"
-version = "0.1.19"
-description = "Building applications with LLMs through composability"
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "langchain_core-0.1.19-py3-none-any.whl", hash = "sha256:46b5fd54181df5aa6d3041d61beb2b91e5437b6742274e7924a97734ed62cf43"},
- {file = "langchain_core-0.1.19.tar.gz", hash = "sha256:30539190a63dff53e995f10aefb943b4f7e01aba4bf28fd1e13016b040c0e9da"},
-]
-
-[package.dependencies]
-anyio = ">=3,<5"
-jsonpatch = ">=1.33,<2.0"
-langsmith = ">=0.0.83,<0.1"
-packaging = ">=23.2,<24.0"
-pydantic = ">=1,<3"
-PyYAML = ">=5.3"
-requests = ">=2,<3"
-tenacity = ">=8.1.0,<9.0.0"
-
-[package.extras]
-extended-testing = ["jinja2 (>=3,<4)"]
-
[[package]]
name = "langcodes"
version = "3.4.0"
@@ -1650,21 +1237,6 @@ language-data = ">=1.2"
build = ["build", "twine"]
test = ["pytest", "pytest-cov"]
-[[package]]
-name = "langsmith"
-version = "0.0.92"
-description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
-optional = false
-python-versions = ">=3.8.1,<4.0"
-files = [
- {file = "langsmith-0.0.92-py3-none-any.whl", hash = "sha256:ddcf65e3b5ca11893ae8ef9816ce2a11a089d051be491886e43a2c4556b88fd0"},
- {file = "langsmith-0.0.92.tar.gz", hash = "sha256:61a3a502222bdd221b7f592b6fc14756d74c4fc088aa6bd8834b92adfe9ee583"},
-]
-
-[package.dependencies]
-pydantic = ">=1,<3"
-requests = ">=2,<3"
-
[[package]]
name = "language-data"
version = "1.2.0"
@@ -1885,25 +1457,6 @@ files = [
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
-[[package]]
-name = "marshmallow"
-version = "3.21.3"
-description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"},
- {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"},
-]
-
-[package.dependencies]
-packaging = ">=17.0"
-
-[package.extras]
-dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
-docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"]
-tests = ["pytest", "pytz", "simplejson"]
-
[[package]]
name = "matplotlib"
version = "3.9.0"
@@ -1992,105 +1545,6 @@ docs = ["sphinx"]
gmpy = ["gmpy2 (>=2.1.0a4)"]
tests = ["pytest (>=4.6)"]
-[[package]]
-name = "multidict"
-version = "6.0.5"
-description = "multidict implementation"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
- {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
- {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
- {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
- {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
- {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
- {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
- {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
- {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
- {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
- {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
- {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
- {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
- {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
- {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
- {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
- {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
- {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
-]
-
[[package]]
name = "murmurhash"
version = "1.0.10"
@@ -2133,17 +1587,6 @@ files = [
{file = "murmurhash-1.0.10.tar.gz", hash = "sha256:5282aab1317804c6ebd6dd7f69f15ba9075aee671c44a34be2bde0f1b11ef88a"},
]
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-optional = false
-python-versions = ">=3.5"
-files = [
- {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
- {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
[[package]]
name = "neo4j"
version = "5.21.0"
@@ -2412,6 +1855,7 @@ description = "Nvidia JIT LTO Library"
optional = false
python-versions = ">=3"
files = [
+ {file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_aarch64.whl", hash = "sha256:004186d5ea6a57758fd6d57052a123c73a4815adf365eb8dd6a85c9eaa7535ff"},
{file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d9714f27c1d0f0895cd8915c07a87a1d0029a0aa36acaf9156952ec2a8a12189"},
{file = "nvidia_nvjitlink_cu12-12.5.40-py3-none-win_amd64.whl", hash = "sha256:c3401dc8543b52d3a8158007a0c1ab4e9c768fcbd24153a48c86972102197ddd"},
]
@@ -3001,6 +2445,64 @@ numba = ">=0.51.2"
scikit-learn = ">=0.18"
scipy = ">=1.0"
+[[package]]
+name = "pyobjc-core"
+version = "10.3.1"
+description = "Python<->ObjC Interoperability Module"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyobjc_core-10.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ea46d2cda17921e417085ac6286d43ae448113158afcf39e0abe484c58fb3d78"},
+ {file = "pyobjc_core-10.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:899d3c84d2933d292c808f385dc881a140cf08632907845043a333a9d7c899f9"},
+ {file = "pyobjc_core-10.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6ff5823d13d0a534cdc17fa4ad47cf5bee4846ce0fd27fc40012e12b46db571b"},
+ {file = "pyobjc_core-10.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2581e8e68885bcb0e11ec619e81ef28e08ee3fac4de20d8cc83bc5af5bcf4a90"},
+ {file = "pyobjc_core-10.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea98d4c2ec39ca29e62e0327db21418696161fb138ee6278daf2acbedf7ce504"},
+ {file = "pyobjc_core-10.3.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4c179c26ee2123d0aabffb9dbc60324b62b6f8614fb2c2328b09386ef59ef6d8"},
+ {file = "pyobjc_core-10.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cb901fce65c9be420c40d8a6ee6fff5ff27c6945f44fd7191989b982baa66dea"},
+ {file = "pyobjc_core-10.3.1.tar.gz", hash = "sha256:b204a80ccc070f9ab3f8af423a3a25a6fd787e228508d00c4c30f8ac538ba720"},
+]
+
+[[package]]
+name = "pyobjc-framework-cocoa"
+version = "10.3.1"
+description = "Wrappers for the Cocoa frameworks on macOS"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4cb4f8491ab4d9b59f5187e42383f819f7a46306a4fa25b84f126776305291d1"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5f31021f4f8fdf873b57a97ee1f3c1620dbe285e0b4eaed73dd0005eb72fd773"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11b4e0bad4bbb44a4edda128612f03cdeab38644bbf174de0c13129715497296"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:de5e62e5ccf2871a94acf3bf79646b20ea893cc9db78afa8d1fe1b0d0f7cbdb0"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c5af24610ab639bd1f521ce4500484b40787f898f691b7a23da3339e6bc8b90"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a7151186bb7805deea434fae9a4423335e6371d105f29e73cc2036c6779a9dbc"},
+ {file = "pyobjc_framework_Cocoa-10.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:743d2a1ac08027fd09eab65814c79002a1d0421d7c0074ffd1217b6560889744"},
+ {file = "pyobjc_framework_cocoa-10.3.1.tar.gz", hash = "sha256:1cf20714daaa986b488fb62d69713049f635c9d41a60c8da97d835710445281a"},
+]
+
+[package.dependencies]
+pyobjc-core = ">=10.3.1"
+
+[[package]]
+name = "pyobjc-framework-quartz"
+version = "10.3.1"
+description = "Wrappers for the Quartz frameworks on macOS"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyobjc_framework_Quartz-10.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ef4fd315ed2bc42ef77fdeb2bae28a88ec986bd7b8079a87ba3b3475348f96e"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:96578d4a3e70164efe44ad7dc320ecd4e211758ffcde5dcd694de1bbdfe090a4"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ca35f92486869a41847a1703bb176aab8a53dbfd8e678d1f4d68d8e6e1581c71"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:00a0933267e3a46ea4afcc35d117b2efb920f06de797fa66279c52e7057e3590"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a161bedb4c5257a02ad56a910cd7eefb28bdb0ea78607df0d70ed4efe4ea54c1"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d7a8028e117a94923a511944bfa9daf9744e212f06cf89010c60934a479863a5"},
+ {file = "pyobjc_framework_Quartz-10.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:de00c983b3267eb26fa42c6ed9f15e2bf006bde8afa7fe2b390646aa21a5d6fc"},
+ {file = "pyobjc_framework_quartz-10.3.1.tar.gz", hash = "sha256:b6d7e346d735c9a7f147cd78e6da79eeae416a0b7d3874644c83a23786c6f886"},
+]
+
+[package.dependencies]
+pyobjc-core = ">=10.3.1"
+pyobjc-framework-Cocoa = ">=10.3.1"
+
[[package]]
name = "pyparsing"
version = "3.1.2"
@@ -3015,6 +2517,22 @@ files = [
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
+[[package]]
+name = "pystray"
+version = "0.19.5"
+description = "Provides systray integration"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pystray-0.19.5-py2.py3-none-any.whl", hash = "sha256:a0c2229d02cf87207297c22d86ffc57c86c227517b038c0d3c59df79295ac617"},
+]
+
+[package.dependencies]
+Pillow = "*"
+pyobjc-framework-Quartz = {version = ">=7.0", markers = "sys_platform == \"darwin\""}
+python-xlib = {version = ">=0.17", markers = "sys_platform == \"linux\""}
+six = "*"
+
[[package]]
name = "pytest"
version = "7.4.4"
@@ -3129,6 +2647,20 @@ files = [
[package.extras]
dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"]
+[[package]]
+name = "python-xlib"
+version = "0.33"
+description = "Python X Library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-xlib-0.33.tar.gz", hash = "sha256:55af7906a2c75ce6cb280a584776080602444f75815a7aff4d287bb2d7018b32"},
+ {file = "python_xlib-0.33-py2.py3-none-any.whl", hash = "sha256:c3534038d42e0df2f1392a1b30a15a4ff5fdc2b86cfa94f072bf11b10a164398"},
+]
+
+[package.dependencies]
+six = ">=1.10.0"
+
[[package]]
name = "pytz"
version = "2024.1"
@@ -3826,93 +3358,6 @@ files = [
{file = "spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645"},
]
-[[package]]
-name = "sqlalchemy"
-version = "2.0.30"
-description = "Database Abstraction Library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"},
- {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"},
- {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"},
- {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"},
- {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"},
- {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"},
- {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"},
- {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"},
- {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"},
-]
-
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
-typing-extensions = ">=4.6.0"
-
-[package.extras]
-aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
-aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
-asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
-mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)"]
-mysql = ["mysqlclient (>=1.4.0)"]
-mysql-connector = ["mysql-connector-python"]
-oracle = ["cx_oracle (>=8)"]
-oracle-oracledb = ["oracledb (>=1.0.1)"]
-postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
-postgresql-psycopg = ["psycopg (>=3.0.7)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
-postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
-pymysql = ["pymysql"]
-sqlcipher = ["sqlcipher3_binary"]
-
[[package]]
name = "srsly"
version = "2.4.8"
@@ -4196,17 +3641,6 @@ files = [
{file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"},
]
-[[package]]
-name = "tk"
-version = "0.1.0"
-description = "TensorKit is a deep learning helper between Python and C++."
-optional = false
-python-versions = "*"
-files = [
- {file = "tk-0.1.0-py3-none-any.whl", hash = "sha256:703a69ff0d5ba2bd2f7440582ad10160e4a6561595d33457dc6caa79b9bf4930"},
- {file = "tk-0.1.0.tar.gz", hash = "sha256:60bc8923d5d35f67f5c6bd93d4f0c49d2048114ec077768f959aef36d4ed97f8"},
-]
-
[[package]]
name = "tokenizers"
version = "0.19.1"
@@ -4532,21 +3966,6 @@ files = [
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
-[[package]]
-name = "typing-inspect"
-version = "0.9.0"
-description = "Runtime inspection utilities for typing module."
-optional = false
-python-versions = "*"
-files = [
- {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
- {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=0.3.0"
-typing-extensions = ">=3.7.4"
-
[[package]]
name = "tzdata"
version = "2024.1"
@@ -4749,109 +4168,6 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
-[[package]]
-name = "yarl"
-version = "1.9.4"
-description = "Yet another URL library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
- {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
- {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
- {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
- {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
- {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
- {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
- {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
- {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
- {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
- {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
- {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
- {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
- {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
- {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
- {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
- {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
- {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
-]
-
-[package.dependencies]
-idna = ">=2.0"
-multidict = ">=4.0"
-
[[package]]
name = "zipp"
version = "3.19.2"
@@ -4870,4 +4186,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "fb84256cafa26bb35a4776992b4639b9526b80e106ff9de90250da4f926c3df2"
+content-hash = "da4722ceb802967820706b6282c26e3b60a6905b0d0941af03572280ee8b3858"
diff --git a/pyproject.toml b/pyproject.toml
index cd67dfc..91a1ae8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,11 +38,11 @@ pyjwt = "^2.8.0"
python-multipart = "^0.0.9"
pytest-asyncio = "^0.23.7"
textblob = "^0.18.0.post0"
-tk = "0.1.0"
pystray = "0.19.5"
supabase = "^2.6.0"
psycopg2-binary = "^2.9.9"
+en-core-web-sm = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl"}
[tool.poetry.group.dev.dependencies]
pytest = "^7.4.3"
pytest-asyncio = "^0.23.2"
diff --git a/topos/api/api_routes.py b/topos/api/api_routes.py
index d5b119d..34a1c64 100644
--- a/topos/api/api_routes.py
+++ b/topos/api/api_routes.py
@@ -5,8 +5,8 @@
from fastapi.responses import JSONResponse
import requests
import signal
-import tkinter as tk
-from tkinter import filedialog
+import glob
+import sys
from topos.FC.conversation_cache_manager import ConversationCacheManager
router = APIRouter()
@@ -34,7 +34,7 @@
cache_manager = ConversationCacheManager(use_postgres=True, db_config=db_config)
else:
cache_manager = ConversationCacheManager()
-
+
class ConversationIDRequest(BaseModel):
conversation_id: str
@@ -58,7 +58,7 @@ async def chat_conversation_analysis(request: ConversationIDRequest):
conversation_id = request.conversation_id
# load conversation
conv_data = cache_manager.load_from_cache(conversation_id)
-
+
if conv_data is None:
raise HTTPException(status_code=404, detail="Conversation not found in cache")
# Initialize counters
@@ -118,7 +118,7 @@ async def chat_conversation_analysis(request: ConversationIDRequest):
for entity in entities:
entity_text_counter[str(entity['text'])] += 1
entity_text_counter_per_user[user][str(entity['text'])] += 1
-
+
emotions = content['commenter']['base_analysis']['emo_27']
for emotion in emotions:
emotion_counter[emotion['label']] += 1
@@ -158,7 +158,7 @@ async def chat_conversation_analysis(request: ConversationIDRequest):
'emotions27': emotion_dict
}
-
+
# Return the conversation or any other response needed
return {"conversation": conversation}
@@ -175,7 +175,7 @@ async def conv_to_image(request: ConversationIDRequest):
if conv_data is None:
raise HTTPException(status_code=404, detail="Conversation not found in cache")
-
+
# model specifications
# TODO UPDATE SO ITS NOT HARDCODED
model = "dolphin-llama3"
@@ -218,7 +218,7 @@ async def conv_to_image(request: ConversationIDRequest):
print(f"\t[ {system_path}")
bytes_list = read_file_as_bytes(file_name)
media_type = "application/json"
-
+
# return the image
return {"file_name" : file_name, "bytes": bytes_list, "prompt": txt_to_img_prompt}
@@ -276,7 +276,7 @@ async def create_next_messages(request: GenNextMessageOptions):
next_message_options = llm_client.generate_response(system_prompt, query, temperature=0)
print(next_message_options)
-
+
# return the options
return {"response" : next_message_options}
@@ -318,13 +318,13 @@ async def create_next_messages(request: ConversationTopicsRequest):
@router.post("/list_models")
async def list_models(provider: str = 'ollama', api_key: str = 'ollama'):
# Define the URLs for different providers
-
+
list_models_urls = {
'ollama': "http://localhost:11434/api/tags",
'openai': "https://api.openai.com/v1/models",
'groq': "https://api.groq.com/openai/v1/models"
}
-
+
if provider not in list_models_urls:
raise HTTPException(status_code=400, detail="Unsupported provider")
@@ -356,20 +356,43 @@ async def test():
@router.post("/get_files")
async def get_files():
- root = tk.Tk()
- root.withdraw()
- filetypes = [("PNG files", "*.png"), ("JPG files", "*.jpg"), ("JPEG files", "*.jpeg")]
- file_path = filedialog.askopenfilename(title="Select an image file",
- filetypes=(filetypes))
- print(file_path)
-
+ # Get the current working directory
+ current_dir = os.getcwd()
+
+ # List all image files in the current directory
+ image_files = glob.glob(os.path.join(current_dir, "*.png")) + \
+ glob.glob(os.path.join(current_dir, "*.jpg")) + \
+ glob.glob(os.path.join(current_dir, "*.jpeg"))
+
+ if not image_files:
+ return {"error": "No image files found in the current directory."}
+
+ # Print available files
+ print("Available image files:")
+ for i, file in enumerate(image_files, 1):
+ print(f"{i}. {os.path.basename(file)}")
+
+ # Get user input
+ while True:
+ try:
+ choice = int(input("Enter the number of the file you want to select: "))
+ if 1 <= choice <= len(image_files):
+ file_path = image_files[choice - 1]
+ break
+ else:
+ print("Invalid choice. Please try again.")
+ except ValueError:
+ print("Please enter a valid number.")
+
+ print(f"Selected file: {file_path}")
+
# Use the os.path module
system_path = os.path.abspath("/")
print(system_path)
bytes_list = read_file_as_bytes(file_path)
media_type = "application/json"
print(type(bytes_list))
- return {"file_name" : [i for i in file_path], "bytes": bytes_list}
+ return {"file_name": [i for i in file_path], "bytes": bytes_list}
def read_file_as_bytes(file_path):
try:
@@ -382,8 +405,8 @@ def read_file_as_bytes(file_path):
except Exception as e:
print(f"An error occurred: {e}")
return None
-
-
+
+
class MermaidChartPayload(BaseModel):
message: str = None
conversation_id: str
@@ -403,12 +426,12 @@ async def generate_mermaid_chart(payload: MermaidChartPayload):
provider = payload.provider# defaults to ollama right now
api_key = payload.api_key
temperature = payload.temperature
-
+
llm_client = LLMController(model_name=model, provider=provider, api_key=api_key)
mermaid_generator = MermaidCreator(llm_client)
-
-
+
+
if full_conversation:
cache_manager = cache_manager
@@ -434,4 +457,4 @@ async def generate_mermaid_chart(payload: MermaidChartPayload):
return {"status": "error", "response": f"Error: {e}", 'completed': True}
except Exception as e:
- return {"status": "error", "message": str(e)}
\ No newline at end of file
+ return {"status": "error", "message": str(e)}
diff --git a/topos/api/p2p_chat_routes.py b/topos/api/p2p_chat_routes.py
index 3eb9454..beec3aa 100644
--- a/topos/api/p2p_chat_routes.py
+++ b/topos/api/p2p_chat_routes.py
@@ -1,8 +1,6 @@
import os
from fastapi import APIRouter, HTTPException, Request
import requests
-import tkinter as tk
-from tkinter import filedialog
from topos.FC.conversation_cache_manager import ConversationCacheManager
from collections import Counter, OrderedDict, defaultdict
from pydantic import BaseModel
@@ -70,7 +68,7 @@ async def process_message(request: Request):
base_analysis = base_token_classifier(message) # this is only an ner dict atm
duration = time.time() - start_time
print(f"\t[ base_token_classifier duration: {duration:.4f} seconds ]")
-
+
# Fetch base, per-message text classifiers
# Start timer for base_text_classifier
if config['calculateModerationTags']:
@@ -82,19 +80,19 @@ async def process_message(request: Request):
logging.error(f"Failed to compute base_text_classifier: {e}")
duration = time.time() - start_time
print(f"\t[ base_text_classifier duration: {duration:.4f} seconds ]")
-
+
conv_cache_manager = cache_manager
dummy_data = {} # Replace with actual processing logic
if config['calculateModerationTags'] or config['calculateInMessageNER']:
print(f"\t[ save to conv cache :: conversation {conversation_id}-{message_id} ]")
try:
dummy_data = {
- message_id :
+ message_id :
{
'user_name': user_name,
'user_id': user_id,
'role': role,
- 'timestamp': datetime.now(),
+ 'timestamp': datetime.now(),
'message': message
}}
except Exception as e:
@@ -114,22 +112,22 @@ async def process_message(request: Request):
print(f"\t[ save to conv cache :: conversation {conversation_id}-{message_id} ]")
# Saving an empty dictionary for the messag id
conv_cache_manager.save_to_cache(conversation_id, {
- message_id :
+ message_id :
{
'user_name': user_name,
'user_id': user_id,
'role': role,
- 'message': message,
- 'timestamp': datetime.now(),
+ 'message': message,
+ 'timestamp': datetime.now(),
}})
dummy_data = {
- message_id :
+ message_id :
{
'user_name': user_name,
'user_id': user_id,
'role': role,
- 'message': message,
- 'timestamp': datetime.now(),
+ 'message': message,
+ 'timestamp': datetime.now(),
}} # Replace with actual processing logic
return {"status": "fetched_user_analysis", "user_message": dummy_data}
diff --git a/topos/services/basic_analytics/token_classifiers.py b/topos/services/basic_analytics/token_classifiers.py
index d67889a..db2044c 100644
--- a/topos/services/basic_analytics/token_classifiers.py
+++ b/topos/services/basic_analytics/token_classifiers.py
@@ -2,19 +2,7 @@
import spacy
from spacy.tokens import Token
-import yaml
-
-from topos.utilities.utils import get_root_directory
-
-# Assuming the config.yaml is in ./topos/ relative to setup.py directory
-config_path = os.path.join(get_root_directory(), 'config.yaml')
-
-with open(config_path, 'r') as file:
- settings = yaml.safe_load(file)
-
-# Load the spacy model setting
-model_name = settings.get('active_spacy_model')
-
+import en_core_web_sm
def get_token_sent(token):
'''
@@ -23,10 +11,9 @@ def get_token_sent(token):
token_span = token.doc[token.i:token.i+1]
return token_span.sent
-
# Now you can use `model_name` in your code
-print(f"[ mem-loader :: Using spaCy model: {model_name} ]")
-nlp = spacy.load(model_name)
+print(f"[ mem-loader :: Using spaCy model: en_core_web_sm ]")
+nlp = en_core_web_sm.load()
Token.set_extension('sent', getter=get_token_sent, force = True)
def get_entity_dict(doc):
From 3500ca75da3d5464c51d35b902c7be0f2a89bb9d Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 8 Oct 2024 16:50:53 -0400
Subject: [PATCH 05/18] feat: Enhance Nix setup and update README
- Add 'topos' package for direct execution
- Create 'topos' dev shell with auto-start
- Update README with new Nix usage instructions
- Remove outdated Spacy model setup steps
---
README.md | 50 ++++++++++++++++++++++++++++++++++++--------------
flake.nix | 23 ++++++++++++++++++++++-
2 files changed, 58 insertions(+), 15 deletions(-)
diff --git a/README.md b/README.md
index 32a3b1b..99ffad8 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
Private AI Backend Service
----
+---
# Topos
A personal intelligence service, using your own computer to power private conversations with friends, family, and coworkers, collect/store your own private data, and use AI privately. Runs great with the [chat arena](https://github.com/jonnyjohnson1/chat-arena) app available on desktop and mobile.
@@ -19,17 +19,38 @@ You should be able to launch the Topos service anywhere from your machine.
## Install with nix (Recommended)
If nix is not installed:
-1. Install nix:
- macos/linux: `sh <(curl -L https://nixos.org/nix/install)`
- windows: `sh <(curl -L https://nixos.org/nix/install) --daemon`
+1. Install nix:
+ macos/linux: `sh <(curl -L https://nixos.org/nix/install)`
+ windows: `sh <(curl -L https://nixos.org/nix/install) --daemon`
Run the software with nix:
-1. Download this repo `git clone https://github.com/jonnyjohnson1/topos-cli`
-2. `cd topos-cli`
-3. build the backend service (only run the topos set --spacy trf line if it is your first time setting up)
+2. Download this repo `git clone https://github.com/jonnyjohnson1/topos-cli`
+3. `cd topos-cli`
+4. ~~build the backend service (only run the topos set --spacy trf line if it is your first time setting up)~~
+
+### Production
+First build topos binary (only usable on machines with nix installed)
+```
+nix build .#topos
+```
+run built binary
+```
+./result/bin/topos
+```
+
+### Dev Shell
+```
+nix develop
+run topos
+```
+
+### Dev Shell (auto start server)
+```
+nix develop .#topos
+```
+
+### Poetry Shell
```
-nix-shell
-topos set --spacy trf
-topos run
+nix develop .#poetry
```
## Install Instructions
@@ -49,8 +70,9 @@ brew services start neo4j
install the topos package with the command `just build`
### Step 2: Set the Spacy Model Size
-Set the size of the spacy model you wish to use on your system.
-There are 'small', 'med', 'large', and 'trf'.
+**BROKEN**
+Set the size of the spacy model you wish to use on your system.
+There are 'small', 'med', 'large', and 'trf'.
Use the tag like this.
`topos set --spacy small`
@@ -68,12 +90,12 @@ zrok is opensourced and free.
ngrok has a gated requests/month under its free tier, then requires you pay for it.
1. Be sure you have the `topos` server running already in another terminal.
-2. [Install zrok command](https://docs.zrok.io/docs/getting-started/?_gl=1*1yet1eb*_ga*MTQ1MDc2ODAyNi4xNzE3MDE3MTE3*_ga_V2KMEXWJ10*MTcxNzAxNzExNi4xLjAuMTcxNzAxNzExNi42MC4wLjA.*_gcl_au*NDk3NjM1MzEyLjE3MTcwMTcxMTc.#installing-the-zrok-command)
+2. [Install zrok command](https://docs.zrok.io/docs/getting-started/?_gl=1*1yet1eb*_ga*MTQ1MDc2ODAyNi4xNzE3MDE3MTE3*_ga_V2KMEXWJ10*MTcxNzAxNzExNi4xLjAuMTcxNzAxNzExNi42MC4wLjA.*_gcl_au*NDk3NjM1MzEyLjE3MTcwMTcxMTc.#installing-the-zrok-command)
3. `zrok enable `
4. `zrok status` should show you information
5. Route local path through zrok: `zrok share public http://0.0.0.0:13341`
This will take you to a new screen with an https:// at the top.
-Insert this url into the field under settings-> "Api Endpoints" -> "Custom API"
+Insert this url into the field under settings-> "Api Endpoints" -> "Custom API"
6. After you've insert it into the field, press the test button, and "hello world" should appear next to the button.
[ ] Enable permanent sharing of zrok url [docs](https://docs.zrok.io/docs/guides/docker-share/#permanent-public-share) (requires Docker)
diff --git a/flake.nix b/flake.nix
index 90b3639..abfa32a 100644
--- a/flake.nix
+++ b/flake.nix
@@ -48,7 +48,14 @@
in
{
- packages.default = pkgs.myapp;
+ packages = {
+ default = pkgs.myapp;
+ topos = pkgs.writeShellScriptBin "topos" ''
+ export PATH="${pkgs.myapp}/bin:$PATH"
+ ${pkgs.myapp}/bin/topos run
+ '';
+ };
+
devShells = {
# Shell for app dependencies.
#
@@ -63,6 +70,20 @@
'';
};
+ # Shell for topos
+ #
+ # nix develop .#topos
+ #
+      # Use this shell to run topos
+ topos = pkgs.mkShell {
+ inputsFrom = [ pkgs.myapp ];
+
+ shellHook = ''
+ export PATH="${pkgs.myapp}/bin:$PATH"
+ topos run
+ '';
+ };
+
# Shell for poetry.
#
# nix develop .#poetry
From ab05857eeaaccc1d17231a2c5c1310fc3d08ebf4 Mon Sep 17 00:00:00 2001
From: luc
Date: Thu, 10 Oct 2024 20:39:46 -0400
Subject: [PATCH 06/18] fix: Restore spaCy model selection and config file
functionality
This commit addresses regressions from previous changes and improves configuration management:
- Reintroduce ability to change active spaCy model using `topos set --spacy trf`
- Move config.yaml to user's system config directory
- Add toposSetupHook in flake.nix to initialize config file if not present
- Update utilities to use TOPOS_CONFIG_PATH environment variable
- Modify OntologicalFeatureDetection and token_classifiers to load spaCy model from config
- Remove direct spaCy model download from spacy_loader.py
- Update get_config_path utility function
These changes restore the flexibility of spaCy model selection and improve
the overall configuration management of the application.
---
config.yaml | 1 +
flake.nix | 21 +++
poetry.lock | 123 +++++++++++++++++-
pyproject.toml | 3 +
topos/FC/ontological_feature_detection.py | 8 +-
topos/config.yaml | 1 -
topos/downloaders/spacy_loader.py | 16 +--
.../basic_analytics/token_classifiers.py | 17 ++-
topos/utilities/utils.py | 14 +-
9 files changed, 179 insertions(+), 25 deletions(-)
create mode 100644 config.yaml
delete mode 100644 topos/config.yaml
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000..8fde4f5
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1 @@
+active_spacy_model: en_core_web_trf
diff --git a/flake.nix b/flake.nix
index abfa32a..43a7f40 100644
--- a/flake.nix
+++ b/flake.nix
@@ -46,6 +46,25 @@
);
};
+ configFile = pkgs.copyPathToStore ./config.yaml;
+ yq = pkgs.yq-go;
+
+ # Note: This only loads the settings from the repos config file
+ # if one is not already set in the user's .config directory.
+ toposSetupHook = ''
+ export TOPOS_CONFIG_PATH="$HOME/.config/topos/config.yaml"
+ mkdir -p "$(dirname "$TOPOS_CONFIG_PATH")"
+ if [ ! -f "$TOPOS_CONFIG_PATH" ]; then
+ echo "Creating new config file at $TOPOS_CONFIG_PATH"
+ echo "# Topos Configuration" > "$TOPOS_CONFIG_PATH"
+ ${yq}/bin/yq eval ${configFile} | while IFS= read -r line; do
+ echo "$line" >> "$TOPOS_CONFIG_PATH"
+ done
+ echo "Config file created at $TOPOS_CONFIG_PATH"
+ else
+ echo "Config file already exists at $TOPOS_CONFIG_PATH"
+ fi
+ '';
in
{
packages = {
@@ -67,6 +86,7 @@
shellHook = ''
export PATH="${pkgs.myapp}/bin:$PATH"
+ ${toposSetupHook}
'';
};
@@ -80,6 +100,7 @@
shellHook = ''
export PATH="${pkgs.myapp}/bin:$PATH"
+ ${toposSetupHook}
topos run
'';
};
diff --git a/poetry.lock b/poetry.lock
index e7cb313..cb68c04 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -474,6 +474,66 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "curated-tokenizers"
+version = "0.0.9"
+description = "Lightweight piece tokenization library"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "curated-tokenizers-0.0.9.tar.gz", hash = "sha256:c93d47e54ab3528a6db2796eeb4bdce5d44e8226c671e42c2f23522ab1d0ce25"},
+ {file = "curated_tokenizers-0.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d3a2570dbbd08bbdae4c79d187fb150ea3b663c2f060bd1e4a050a1358cfd1"},
+ {file = "curated_tokenizers-0.0.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:799b8a9a1603b7d12683017409bf338bff925aa9806fbad0925ac550501afdf8"},
+ {file = "curated_tokenizers-0.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfc4541c3e5738d74dbf859eb87c26112178b7a91be1d99a4bdced8182f4a73"},
+ {file = "curated_tokenizers-0.0.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a61acd1c66aea2198702b2a1418a6f3bf1241e3e302c1295a5878e892010642"},
+ {file = "curated_tokenizers-0.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:00a9eff167481494f967ad0efc5c53164d460d4f40d816f6c132f69c8584a735"},
+ {file = "curated_tokenizers-0.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:899128d78177ca0ac668addc33b430020f737dd08bc6bf3753ff4d9ba0e41e75"},
+ {file = "curated_tokenizers-0.0.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d1fef3e861df50bd6337364a87f447fbd0a6f01c095cec121b7404d15512138"},
+ {file = "curated_tokenizers-0.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ff13e8c19f7cdb03441ca5ec9ce85f133da7fd5b9cc574d8d18af41ba8a50a"},
+ {file = "curated_tokenizers-0.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4079b1cb2220cb76deb271fa55f4867be3764f15e8fdb1bfc0a2041081570224"},
+ {file = "curated_tokenizers-0.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:c661d09ffe7a4a9175f28d76034e01c87df9df6fedb998151abbf201f28f1aa0"},
+ {file = "curated_tokenizers-0.0.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e66aedfeae0c91f3f3e2980b17933b3d08f3fba6c8ba7057b9b05d596e8a0b27"},
+ {file = "curated_tokenizers-0.0.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2abbb571666a9c9b3a15f9df022e25ed1137e9fa8346788aaa747c00f940a3c6"},
+ {file = "curated_tokenizers-0.0.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64b9991a9720a0ce8cc72d29791fd73f2cc2bef0241b002fd2a756ec8a629143"},
+ {file = "curated_tokenizers-0.0.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35fb208a01f2b3f22172596915d229859549a2d76e484be976dd728b1ca3bdec"},
+ {file = "curated_tokenizers-0.0.9-cp312-cp312-win_amd64.whl", hash = "sha256:209d756694c7fb000a0b642016eb6e71c740cfce293adcbf3384aa2a1e701eb2"},
+ {file = "curated_tokenizers-0.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:04c44758348515dcd36b63cda6b3417ce028d150695d4d2cc16022fa5fab41dd"},
+ {file = "curated_tokenizers-0.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66c29358530c3fa7a3e25cd05c0d6d5fefa4807e02265e0803503f9cc98ab8fc"},
+ {file = "curated_tokenizers-0.0.9-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009fc6f30b2e38a7d2b8ad8be47816b78c53225c0985b97ecffc45806311b0f2"},
+ {file = "curated_tokenizers-0.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:df353261c37c5cc70c9e2a64adb88bcad847c171aa40ed2734fce70796fc2e1e"},
+ {file = "curated_tokenizers-0.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a94dcc4ef63bfd7e05bf11f3081fd472e241e70230207a6c8087fa778fff6a9"},
+ {file = "curated_tokenizers-0.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad60ace40c52c2eb447b0ca0cb1234a57a704641eb95344bbaf6239d359dae9"},
+ {file = "curated_tokenizers-0.0.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d135d80ed31a7b10b192f8a26147e4b95854f11963f2f6fdd4c95d870c36fd11"},
+ {file = "curated_tokenizers-0.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:ed857074ea7362755ffaa027ecbcc047f78dc461bba3ba9e1dfa00f433b7e4b0"},
+ {file = "curated_tokenizers-0.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28613e6a78e3456f5683e8f05f180a5aab0f18732282de8a236c87d66ff6e9d1"},
+ {file = "curated_tokenizers-0.0.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a5df6be9f1d1de9659b1fa34f2b53449c405097816cab5eb505a02d4767dd9e"},
+ {file = "curated_tokenizers-0.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd667d08bf7c560afd02998aab8fcb5aaa16c9c69cf79d9e4ac684196a5b046"},
+ {file = "curated_tokenizers-0.0.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41b830f3f6d493adfd0ef9fe793ee07ca2a47d6198f95b3dabe3bc9600af3cc4"},
+ {file = "curated_tokenizers-0.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6aa7972ebcab5fa3486b4b0f991bd0d3ba7b7d2643508ac8bdaf8af5701d7ce"},
+ {file = "curated_tokenizers-0.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fafa009ee45d2599bf321c194a6a71c05372fb2d325640458d0d70320229b8ff"},
+ {file = "curated_tokenizers-0.0.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce0b2369783a4645a3806d94ec42c523de4e2e23932200f452e05d3b2ea81481"},
+ {file = "curated_tokenizers-0.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47a1219418654a26c1e039d08f37246d9917af4c0bf492441095f4a5c1072ee2"},
+ {file = "curated_tokenizers-0.0.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307cab745488937b66948a1ce4bf86ad978c9fed29c93f2a72882b32d344689"},
+ {file = "curated_tokenizers-0.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:81e90b046cb439466e7f3775619f87036a94c1d851146b45e5c63277b2d9ee07"},
+]
+
+[package.dependencies]
+regex = ">=2022"
+
+[[package]]
+name = "curated-transformers"
+version = "0.1.1"
+description = "Curated transformer models"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "curated-transformers-0.1.1.tar.gz", hash = "sha256:4671f03314df30efda2ec2b59bc7692ea34fcea44cb65382342c16684e8a2119"},
+ {file = "curated_transformers-0.1.1-py2.py3-none-any.whl", hash = "sha256:d716063d73d803c6925d2dab56fde9b9ab8e89e663c2c0587804944ba488ff01"},
+]
+
+[package.dependencies]
+torch = ">=1.12.0"
+
[[package]]
name = "cycler"
version = "0.12.1"
@@ -650,6 +710,34 @@ files = [
[package.extras]
dev = ["coverage", "coveralls", "pytest"]
+[[package]]
+name = "en_core_web_lg"
+version = "3.8.0"
+description = "English pipeline optimized for CPU. Components: tok2vec, tagger, parser, senter, ner, attribute_ruler, lemmatizer."
+optional = false
+python-versions = "*"
+files = [
+ {file = "en_core_web_lg-3.8.0-py3-none-any.whl", hash = "sha256:293e9547a655b25499198ab15a525b05b9407a75f10255e405e8c3854329ab63"},
+]
+
+[package.source]
+type = "url"
+url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_lg-3.8.0/en_core_web_lg-3.8.0-py3-none-any.whl"
+
+[[package]]
+name = "en_core_web_md"
+version = "3.8.0"
+description = "English pipeline optimized for CPU. Components: tok2vec, tagger, parser, senter, ner, attribute_ruler, lemmatizer."
+optional = false
+python-versions = "*"
+files = [
+ {file = "en_core_web_md-3.8.0-py3-none-any.whl", hash = "sha256:5e6329fe3fecedb1d1a02c3ea2172ee0fede6cea6e4aefb6a02d832dba78a310"},
+]
+
+[package.source]
+type = "url"
+url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_md-3.8.0/en_core_web_md-3.8.0-py3-none-any.whl"
+
[[package]]
name = "en_core_web_sm"
version = "3.8.0"
@@ -664,6 +752,23 @@ files = [
type = "url"
url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl"
+[[package]]
+name = "en_core_web_trf"
+version = "3.8.0"
+description = "English transformer pipeline (Transformer(name='roberta-base', piece_encoder='byte-bpe', stride=104, type='roberta', width=768, window=144, vocab_size=50265)). Components: transformer, tagger, parser, ner, attribute_ruler, lemmatizer."
+optional = false
+python-versions = "*"
+files = [
+ {file = "en_core_web_trf-3.8.0-py3-none-any.whl", hash = "sha256:272a31e9d8530d1e075351d30a462d7e80e31da23574f1b274e200f3fff35bf5"},
+]
+
+[package.dependencies]
+spacy-curated-transformers = ">=0.2.2,<1.0.0"
+
+[package.source]
+type = "url"
+url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_trf-3.8.0/en_core_web_trf-3.8.0-py3-none-any.whl"
+
[[package]]
name = "exceptiongroup"
version = "1.2.1"
@@ -3336,6 +3441,22 @@ lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"]
th = ["pythainlp (>=2.0)"]
transformers = ["spacy-transformers (>=1.1.2,<1.4.0)"]
+[[package]]
+name = "spacy-curated-transformers"
+version = "0.3.0"
+description = "Curated transformer models for spaCy pipelines"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "spacy_curated_transformers-0.3.0-py2.py3-none-any.whl", hash = "sha256:ddfd33e81b53ad798dac841ab022189f9543718ff874eda1081fce6ff93de377"},
+ {file = "spacy_curated_transformers-0.3.0.tar.gz", hash = "sha256:989a6bf2aa7becd1ac8c3be5f245cd489223d4e16e7218f6b69479c7e2689937"},
+]
+
+[package.dependencies]
+curated-tokenizers = ">=0.0.9,<0.1.0"
+curated-transformers = ">=0.1.0,<0.2.0"
+torch = ">=1.12.0"
+
[[package]]
name = "spacy-legacy"
version = "3.0.12"
@@ -4186,4 +4307,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "da4722ceb802967820706b6282c26e3b60a6905b0d0941af03572280ee8b3858"
+content-hash = "c84fc9b6664a4af5e8252628955097c476976b14019aa8c155169851761c4bf3"
diff --git a/pyproject.toml b/pyproject.toml
index 91a1ae8..98c362e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,6 +43,9 @@ pystray = "0.19.5"
supabase = "^2.6.0"
psycopg2-binary = "^2.9.9"
en-core-web-sm = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl"}
+en-core-web-lg = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_lg-3.8.0/en_core_web_lg-3.8.0-py3-none-any.whl"}
+en-core-web-md = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_md-3.8.0/en_core_web_md-3.8.0-py3-none-any.whl"}
+en-core-web-trf = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_trf-3.8.0/en_core_web_trf-3.8.0-py3-none-any.whl"}
[tool.poetry.group.dev.dependencies]
pytest = "^7.4.3"
pytest-asyncio = "^0.23.2"
diff --git a/topos/FC/ontological_feature_detection.py b/topos/FC/ontological_feature_detection.py
index 9c5e9ca..7e96e65 100644
--- a/topos/FC/ontological_feature_detection.py
+++ b/topos/FC/ontological_feature_detection.py
@@ -10,7 +10,7 @@
from datetime import datetime
from topos.services.database.app_state import AppState
-from topos.utilities.utils import get_root_directory
+from topos.utilities.utils import get_config_path
import os
import yaml
@@ -37,11 +37,11 @@ def __init__(self, neo4j_uri, neo4j_user, neo4j_password, neo4j_database_name, u
self.tokenizer = AutoTokenizer.from_pretrained("dslim/bert-base-NER")
self.model = AutoModelForTokenClassification.from_pretrained("dslim/bert-base-NER")
# Assuming the config.yaml is in ./topos/ relative to setup.py directory
- config_path = os.path.join(get_root_directory(), 'config.yaml')
+ config_path = get_config_path()
with open(config_path, 'r') as file:
settings = yaml.safe_load(file)
-
+
spacy_model_name = settings.get('active_spacy_model')
# Load SpaCy models
@@ -639,4 +639,4 @@ def get_connected_nodes(self, node_id, edges):
# paragraph = (
# "John, a software engineer from New York, bought a new laptop from Amazon on Saturday. "
# "He later met with his friend Alice, who is a data scientist at Google, for coffee at Starbucks. "
-# "They discussed a variety of topics including the recent advancements in arti
\ No newline at end of file
+# "They discussed a variety of topics including the recent advancements in arti
diff --git a/topos/config.yaml b/topos/config.yaml
deleted file mode 100644
index 3c7ab5d..0000000
--- a/topos/config.yaml
+++ /dev/null
@@ -1 +0,0 @@
-active_spacy_model: en_core_web_trf
\ No newline at end of file
diff --git a/topos/downloaders/spacy_loader.py b/topos/downloaders/spacy_loader.py
index 63a3f65..e29c1fc 100644
--- a/topos/downloaders/spacy_loader.py
+++ b/topos/downloaders/spacy_loader.py
@@ -1,8 +1,6 @@
-import subprocess
import yaml
import os
-from ..utilities.utils import get_python_command, get_root_directory
-
+from ..utilities.utils import get_config_path
def download_spacy_model(model_selection):
if model_selection == 'small':
@@ -15,17 +13,13 @@ def download_spacy_model(model_selection):
model_name = "en_core_web_trf"
else: #default
model_name = "en_core_web_sm"
-
- python_command = get_python_command()
-
+
# Define the path to the config.yaml file
- config_path = os.path.join(get_root_directory(), 'config.yaml')
+ config_path = get_config_path()
try:
- subprocess.run([python_command, '-m', 'spacy', 'download', model_name], check=True)
# Write updated settings to YAML file
with open(config_path, 'w') as file:
yaml.dump({'active_spacy_model': model_name}, file)
- print(f"Successfully downloaded '{model_name}' spaCy model.")
print(f"'{model_name}' set as active model.")
- except subprocess.CalledProcessError as e:
- print(f"Error downloading '{model_name}' spaCy model: {e}")
\ No newline at end of file
+ except Exception as e:
+ print(f"An error occurred setting config.yaml: {e}")
diff --git a/topos/services/basic_analytics/token_classifiers.py b/topos/services/basic_analytics/token_classifiers.py
index db2044c..e509dd8 100644
--- a/topos/services/basic_analytics/token_classifiers.py
+++ b/topos/services/basic_analytics/token_classifiers.py
@@ -2,7 +2,18 @@
import spacy
from spacy.tokens import Token
-import en_core_web_sm
+import yaml
+
+from topos.utilities.utils import get_config_path
+
+# Assuming the config.yaml is in ./topos/ relative to setup.py directory
+config_path = get_config_path()
+
+with open(config_path, 'r') as file:
+ settings = yaml.safe_load(file)
+
+# Load the spacy model setting
+model_name = settings.get('active_spacy_model')
def get_token_sent(token):
'''
@@ -12,8 +23,8 @@ def get_token_sent(token):
return token_span.sent
# Now you can use `model_name` in your code
-print(f"[ mem-loader :: Using spaCy model: en_core_web_sm ]")
-nlp = en_core_web_sm.load()
+print(f"[ mem-loader :: Using spaCy model: {model_name} ]")
+nlp = spacy.load(model_name)
Token.set_extension('sent', getter=get_token_sent, force = True)
def get_entity_dict(doc):
diff --git a/topos/utilities/utils.py b/topos/utilities/utils.py
index 5115c28..ff1be6a 100644
--- a/topos/utilities/utils.py
+++ b/topos/utilities/utils.py
@@ -3,7 +3,6 @@
import os
import shutil
-
def get_python_command():
if shutil.which("python"):
return "python"
@@ -12,21 +11,26 @@ def get_python_command():
else:
raise EnvironmentError("No Python interpreter found")
+def get_config_path():
+ config_path = os.getenv('TOPOS_CONFIG_PATH')
+ if not config_path:
+ raise EnvironmentError("TOPOS_CONFIG_PATH environment variable is not set")
+ return config_path
def get_root_directory():
# Get the current file's directory
current_file_directory = os.path.dirname(os.path.abspath(__file__))
-
+
# Find the first occurrence of "topos" from the right
topos_index = current_file_directory.rfind("topos")
-
+
if topos_index != -1:
# Get the path up to the first "topos" directory
base_topos_directory = current_file_directory[:topos_index + len("topos")]
return base_topos_directory
else:
raise ValueError("The 'topos' directory was not found in the path.")
-
+
def parse_json(data):
import json
return json.loads(data)
@@ -57,4 +61,4 @@ def generate_hex_code(n_digits):
return ''.join(random.choice('0123456789ABCDEF') for _ in range(n_digits))
def generate_deci_code(n_digits):
- return ''.join(random.choice('0123456789') for _ in range(n_digits))
\ No newline at end of file
+ return ''.join(random.choice('0123456789') for _ in range(n_digits))
From 446e4c77bdf5e2287fc41f61728c82e14bffa333 Mon Sep 17 00:00:00 2001
From: luc
Date: Wed, 9 Oct 2024 20:21:14 -0400
Subject: [PATCH 07/18] feat: Integrate PostgreSQL setup in Nix development
environment
- Add PostgreSQL as a package dependency in development shells
- Create a PostgreSQL setup hook that:
- Initializes the database directory
- Starts the PostgreSQL server
- Creates necessary tables (conversation_cache, entities, relations)
- Sets up user roles and permissions
- Load environment variables from .env_dev file
- Incorporate PostgreSQL setup in both default and topos development shells
These changes streamline the development process by automatically setting up
and configuring PostgreSQL when entering the Nix development environment,
ensuring a consistent database setup for all developers.
---
flake.nix | 72 +++++++++++++++++++++++++++++++++++++++++++++++++++++--
1 file changed, 70 insertions(+), 2 deletions(-)
diff --git a/flake.nix b/flake.nix
index 43a7f40..b5f7f66 100644
--- a/flake.nix
+++ b/flake.nix
@@ -46,6 +46,7 @@
);
};
+ envFile = pkgs.writeText "env_dev" (builtins.readFile ./.env_dev);
configFile = pkgs.copyPathToStore ./config.yaml;
yq = pkgs.yq-go;
@@ -65,6 +66,71 @@
echo "Config file already exists at $TOPOS_CONFIG_PATH"
fi
'';
+
+ postgresSetupHook = ''
+ # SETUP POSTGRES SERVER
+ echo "Loading environment variables from Nix store"
+ export $(cat ${envFile} | xargs)
+
+ # Define PGDATA and LOGFILE based on environment variables
+ export PGDATA=$(pwd)/pgdata
+ LOGFILE=$(pwd)/pgdata/postgresql.log
+
+ echo "Initializing PostgreSQL data directory at $PGDATA"
+
+ echo "PGDATA: $PGDATA"
+ if [ ! -d "$PGDATA" ]; then
+ initdb -D "$PGDATA" | tee -a $LOGFILE
+ fi
+
+ echo "Stopping any existing PostgreSQL server..."
+ pg_ctl -D "$PGDATA" stop || echo "No existing server to stop."
+
+ echo "Starting PostgreSQL server..."
+ pg_ctl -D "$PGDATA" -l $LOGFILE start
+
+ # Wait for PostgreSQL to start
+ sleep 2
+
+ # Set up the test database, role, and tables
+ psql -d $POSTGRES_DB <
Date: Wed, 9 Oct 2024 23:26:39 -0400
Subject: [PATCH 08/18] fix: Improve PostgreSQL setup in Nix development
environment
- Ensure PGDATA directory exists before initialization
- Use pg_isready to wait for PostgreSQL to start properly
- Create the database if it doesn't exist
- Remove redundant user creation (assuming user already exists)
- Improve error handling in SQL execution
- Adjust log file location and server stop/start commands
- Clean up environment variable loading
These changes ensure a more robust and reliable PostgreSQL setup
process when entering the Nix development environment.
---
.env_dev | 4 ++--
flake.nix | 39 ++++++++++++++++++++++++++++++---------
2 files changed, 32 insertions(+), 11 deletions(-)
diff --git a/.env_dev b/.env_dev
index 4b58f03..c1268cb 100644
--- a/.env_dev
+++ b/.env_dev
@@ -9,7 +9,7 @@ ONE_API_API_KEY="sk-oneapi.local123"
SUPABASE_URL=
SUPABASE_KEY=
POSTGRES_DB=test_topos_db
-POSTGRES_USER=your_username
+POSTGRES_USER=username
POSTGRES_PASSWORD=your_password
POSTGRES_HOST=localhost
-POSTGRES_PORT=5432
\ No newline at end of file
+POSTGRES_PORT=5432
diff --git a/flake.nix b/flake.nix
index b5f7f66..0bfd29f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -70,30 +70,52 @@
postgresSetupHook = ''
# SETUP POSTGRES SERVER
echo "Loading environment variables from Nix store"
- export $(cat ${envFile} | xargs)
+ export $(cat ${envFile} | xargs)
# Define PGDATA and LOGFILE based on environment variables
export PGDATA=$(pwd)/pgdata
- LOGFILE=$(pwd)/pgdata/postgresql.log
+ LOGFILE=$PGDATA/postgresql.log
echo "Initializing PostgreSQL data directory at $PGDATA"
echo "PGDATA: $PGDATA"
if [ ! -d "$PGDATA" ]; then
- initdb -D "$PGDATA" | tee -a $LOGFILE
+ mkdir -p "$PGDATA"
+ initdb -D "$PGDATA"
fi
echo "Stopping any existing PostgreSQL server..."
- pg_ctl -D "$PGDATA" stop || echo "No existing server to stop."
+ pg_ctl -D "$PGDATA" stop -s -m fast || echo "No existing server to stop."
echo "Starting PostgreSQL server..."
- pg_ctl -D "$PGDATA" -l $LOGFILE start
+ pg_ctl -D "$PGDATA" -l $LOGFILE start -w
# Wait for PostgreSQL to start
- sleep 2
+ for i in {1..10}; do
+ if pg_isready -q; then
+ break
+ fi
+ echo "Waiting for PostgreSQL to start..."
+ sleep 1
+ done
+
+ if ! pg_isready -q; then
+ echo "Failed to start PostgreSQL. Check the logs at $LOGFILE"
+ exit 1
+ fi
+
+ # Create the database if it doesn't exist
+ if ! psql -lqt | cut -d \| -f 1 | grep -qw "$POSTGRES_DB"; then
+ createdb "$POSTGRES_DB"
+ fi
+
+ # Create the user if they don't exist
+ if ! psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$POSTGRES_USER'" | grep -q 1; then
+ createuser -s "$POSTGRES_USER"
+ fi
# Set up the test database, role, and tables
- psql -d $POSTGRES_DB <
Date: Sat, 12 Oct 2024 11:58:14 -0400
Subject: [PATCH 09/18] feat: Add initial Kafka setup in Nix development
environment (WIP)
- Add kafkaSetupHook to initialize and start Kafka in Kraft mode
- Configure Kafka server properties for a single-node setup
- Initialize Kafka storage and start the Kafka server
- Create a 'chat_topic' for initial testing
Note: This is a work in progress and may require further refinement and testing.
Future commits will focus on integrating this setup with the existing
development environment and ensuring proper error handling and cleanup.
---
flake.nix | 42 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 42 insertions(+)
diff --git a/flake.nix b/flake.nix
index 0bfd29f..513b3ef 100644
--- a/flake.nix
+++ b/flake.nix
@@ -152,6 +152,48 @@
# FINISH POSTGRES SERVER
'';
+ kafkaSetupHook = ''
+ echo "Starting Kafka in Kraft mode..."
+
+ # Set up necessary environment variables
+ export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M"
+ export KAFKA_KRAFT_MODE=true
+ echo ${pkgs.apacheKafka}
+
+ # Prepare a default config for Kraft mode
+ if [ ! -f ./kafka.properties ]; then
+ echo "Initializing Kafka Kraft mode..."
+
+ # Server 1 Kraft
+ cp ${pkgs.apacheKafka}/config/kraft/server.properties ./server-1.properties
+ sudo sed -i '57!s/PLAINTEXT/MQ/g' server-1.properties
+ sudo sed -i '30s/.*/controller.quorum.voters=1@localhost:9091/' server-1.properties
+ sudo sed -i '78s|.*|log.dirs=/tmp/kraft-combined-logs/server-1|' server-1.properties
+ sudo sed -i '27s|.*|node.id=1|' server-1.properties
+ sudo sed -i '42s|.*|listeners=MQ://:9092,CONTROLLER://:9091|' server-1.properties
+ sudo sed -i '92s|.*|offsets.topic.replication.factor=1|' server-1.properties
+ sudo sed -i '57s|.*|listener.security.protocol.map=CONTROLLER:PLAINTEXT,MQ:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL|' server-1.properties
+
+ fi
+
+ # Step 1
+ KAFKA_CLUSTER_ID="$(${pkgs.apacheKafka}/bin/kafka-storage.sh random-uuid)"
+
+ # Step 2
+ ${pkgs.apacheKafka}/bin/kafka-storage.sh format -t $KAFKA_CLUSTER_ID -c ./server-1.properties
+
+ # Step 3
+ ${pkgs.apacheKafka}/bin/kafka-server-start.sh ./server-1.properties &
+
+ # Step 4
+ echo "Kafka environment is ready to use and running in detached terminals."
+
+ # Step 5
+ ${pkgs.apacheKafka}/bin/kafka-topics.sh --create --topic chat_topic --bootstrap-server localhost:9092 --partitions 1 --replication-factor 1
+
+ sleep 3
+ '';
+
in
{
packages = {
From 9e5272db0a38a085cd66dbeb9f1746a8373e7c6c Mon Sep 17 00:00:00 2001
From: luc
Date: Sun, 13 Oct 2024 22:09:35 -0400
Subject: [PATCH 10/18] refactor: Replace flake-utils with flake-parts and
improve flake structure
- Replace flake-utils with flake-parts for improved flake composition
- Restructure flake.nix for better organization and readability
- Rename 'myapp' to 'toposPoetryEnv' for clarity
- Separate Poetry environment and application building
- Introduce a common 'overrides' configuration for Poetry packages
- Update package definitions and development shells
- Adjust PATH exports in shell environments
- Streamline the default package to use the topos script directly
These changes aim to enhance the maintainability and flexibility of our
Nix setup while preserving existing functionality. The use of flake-parts
allows for a more modular and extensible configuration.
---
flake.lock | 36 +++++++++++++++++++++++++----
flake.nix | 67 ++++++++++++++++++++++++++++++------------------------
2 files changed, 69 insertions(+), 34 deletions(-)
diff --git a/flake.lock b/flake.lock
index a5eb791..1ab5a19 100644
--- a/flake.lock
+++ b/flake.lock
@@ -1,5 +1,23 @@
{
"nodes": {
+ "flake-parts": {
+ "inputs": {
+ "nixpkgs-lib": "nixpkgs-lib"
+ },
+ "locked": {
+ "lastModified": 1727826117,
+ "narHash": "sha256-K5ZLCyfO/Zj9mPFldf3iwS6oZStJcU4tSpiXTMYaaL0=",
+ "owner": "hercules-ci",
+ "repo": "flake-parts",
+ "rev": "3d04084d54bedc3d6b8b736c70ef449225c361b1",
+ "type": "github"
+ },
+ "original": {
+ "owner": "hercules-ci",
+ "repo": "flake-parts",
+ "type": "github"
+ }
+ },
"flake-utils": {
"inputs": {
"systems": "systems"
@@ -55,11 +73,21 @@
"type": "github"
}
},
+ "nixpkgs-lib": {
+ "locked": {
+ "lastModified": 1727825735,
+ "narHash": "sha256-0xHYkMkeLVQAMa7gvkddbPqpxph+hDzdu1XdGPJR+Os=",
+ "type": "tarball",
+ "url": "https://github.com/NixOS/nixpkgs/archive/fb192fec7cc7a4c26d51779e9bab07ce6fa5597a.tar.gz"
+ },
+ "original": {
+ "type": "tarball",
+ "url": "https://github.com/NixOS/nixpkgs/archive/fb192fec7cc7a4c26d51779e9bab07ce6fa5597a.tar.gz"
+ }
+ },
"poetry2nix": {
"inputs": {
- "flake-utils": [
- "flake-utils"
- ],
+ "flake-utils": "flake-utils",
"nix-github-actions": "nix-github-actions",
"nixpkgs": [
"nixpkgs"
@@ -83,7 +111,7 @@
},
"root": {
"inputs": {
- "flake-utils": "flake-utils",
+ "flake-parts": "flake-parts",
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix"
}
diff --git a/flake.nix b/flake.nix
index 513b3ef..c970d8f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,24 +2,26 @@
description = "topos";
inputs = {
- flake-utils.url = "github:numtide/flake-utils";
+ flake-parts.url = "github:hercules-ci/flake-parts";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
poetry2nix = {
url = "github:nix-community/poetry2nix";
inputs.nixpkgs.follows = "nixpkgs";
- inputs.flake-utils.follows = "flake-utils";
};
};
- outputs = { self, nixpkgs, flake-utils, poetry2nix }:
- flake-utils.lib.eachSystem ["aarch64-darwin"] (system:
+ outputs = { self, nixpkgs, flake-parts, poetry2nix }@inputs:
+ flake-parts.lib.mkFlake { inherit inputs; } {
+ imports = [ ];
+ systems = [ "x86_64-linux" "aarch64-darwin" ];
+ perSystem = { pkgs, system, ... }:
let
pkgs = import nixpkgs {
inherit system;
overlays = [
- poetry2nix.overlays.default
+ inputs.poetry2nix.overlays.default
(final: prev: {
- myapp = final.callPackage myapp { };
+ toposPoetryEnv = final.callPackage toposPoetryEnv { };
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(python-final: python-prev: {
pystray = python-final.callPackage ./overlays/pystray/default.nix { };
@@ -28,22 +30,23 @@
})
];
};
-
# see https://github.com/nix-community/poetry2nix/tree/master#api for more functions and examples.
- myapp = { poetry2nix, lib }: poetry2nix.mkPoetryApplication {
+ #TODO: Figure out how to add setuptools to all the packages which need it, this is currently not working as expected.
+ overrides = pkgs.poetry2nix.overrides.withDefaults (final: super:
+ pkgs.lib.mapAttrs
+ (attr: systems: super.${attr}.overridePythonAttrs
+ (old: {
+ nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: final.${a}) systems;
+ }))
+ {
+ # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
+ package = [ "setuptools" ];
+ }
+ );
+ toposPoetryEnv = pkgs.poetry2nix.mkPoetryEnv {
projectDir = self;
preferWheels = true;
- overrides = poetry2nix.overrides.withDefaults (final: super:
- lib.mapAttrs
- (attr: systems: super.${attr}.overridePythonAttrs
- (old: {
- nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: final.${a}) systems;
- }))
- {
- # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
- package = [ "setuptools" ];
- }
- );
+ inherit overrides;
};
envFile = pkgs.writeText "env_dev" (builtins.readFile ./.env_dev);
@@ -196,11 +199,15 @@
in
{
- packages = {
- default = pkgs.myapp;
- topos = pkgs.writeShellScriptBin "topos" ''
- export PATH="${pkgs.myapp}/bin:$PATH"
- ${pkgs.myapp}/bin/topos run
+ packages = rec {
+ topos = pkgs.poetry2nix.mkPoetryApplication {
+ projectDir = self;
+ preferWheels = true;
+ inherit overrides;
+ };
+ default = pkgs.writeShellScriptBin "topos" ''
+ ${toposSetupHook}
+ ${topos}/bin/topos "$@"
'';
};
@@ -211,10 +218,10 @@
#
# Use this shell for developing your app.
default = pkgs.mkShell {
- inputsFrom = [ pkgs.myapp ];
+ inputsFrom = [ toposPoetryEnv ];
packages = [ pkgs.postgresql ];
shellHook = ''
- export PATH="${pkgs.myapp}/bin:$PATH"
+ export PATH="${toposPoetryEnv}/bin:$PATH"
${toposSetupHook}
${postgresSetupHook}
'';
@@ -226,10 +233,10 @@
#
# Use this shell running topos
topos = pkgs.mkShell {
- inputsFrom = [ pkgs.myapp ];
+ inputsFrom = [ toposPoetryEnv ];
packages = [ pkgs.postgresql ];
shellHook = ''
- export PATH="${pkgs.myapp}/bin:$PATH"
+ export PATH="${toposPoetryEnv}/bin:$PATH"
${toposSetupHook}
${postgresSetupHook}
topos run
@@ -246,6 +253,6 @@
};
};
legacyPackages = pkgs;
+ };
+};
}
- );
-}
From dcabe4bd9727e396d9bc7e4c11f58074b6c1e02c Mon Sep 17 00:00:00 2001
From: luc
Date: Sun, 13 Oct 2024 22:26:07 -0400
Subject: [PATCH 11/18] feat: Integrate process-compose and services-flake for
improved service management
- Add process-compose-flake and services-flake as new inputs
- Implement a new process-compose configuration for managing services
- Integrate Ollama LLM service with configurable models
- Refactor PostgreSQL setup to use services-flake
- Add a new topos-service.nix for defining the Topos service
- Update flake.nix to use the new service configurations
- Modify .env_dev to use 127.0.0.1 instead of localhost for POSTGRES_HOST
- Simplify devShells configuration
- Remove manual setup hooks for PostgreSQL and Kafka
These changes aim to provide a more robust and manageable service
configuration, leveraging process-compose for better service orchestration
and dependency management.
---
.env_dev | 2 +-
flake.lock | 34 +++-
flake.nix | 442 +++++++++++++++++++++++-----------------------
topos-service.nix | 28 +++
4 files changed, 280 insertions(+), 226 deletions(-)
create mode 100644 topos-service.nix
diff --git a/.env_dev b/.env_dev
index c1268cb..75ae375 100644
--- a/.env_dev
+++ b/.env_dev
@@ -11,5 +11,5 @@ SUPABASE_KEY=
POSTGRES_DB=test_topos_db
POSTGRES_USER=username
POSTGRES_PASSWORD=your_password
-POSTGRES_HOST=localhost
+POSTGRES_HOST=127.0.0.1
POSTGRES_PORT=5432
diff --git a/flake.lock b/flake.lock
index 1ab5a19..9567a9d 100644
--- a/flake.lock
+++ b/flake.lock
@@ -109,11 +109,43 @@
"type": "github"
}
},
+ "process-compose-flake": {
+ "locked": {
+ "lastModified": 1728868941,
+ "narHash": "sha256-yEMzxZfy+EE9gSqn++SyZeAVHXYupFT8Wyf99Z/CXXU=",
+ "owner": "Platonic-Systems",
+ "repo": "process-compose-flake",
+ "rev": "29301aec92d73c9b075fcfd06a6fb18665bfe6b5",
+ "type": "github"
+ },
+ "original": {
+ "owner": "Platonic-Systems",
+ "repo": "process-compose-flake",
+ "type": "github"
+ }
+ },
"root": {
"inputs": {
"flake-parts": "flake-parts",
"nixpkgs": "nixpkgs",
- "poetry2nix": "poetry2nix"
+ "poetry2nix": "poetry2nix",
+ "process-compose-flake": "process-compose-flake",
+ "services-flake": "services-flake"
+ }
+ },
+ "services-flake": {
+ "locked": {
+ "lastModified": 1728811751,
+ "narHash": "sha256-IrwycNtt6jxJGCi+QJ8Bbzt9flg0vNeGLAR0KBbj4a8=",
+ "owner": "juspay",
+ "repo": "services-flake",
+ "rev": "e9f663036f3b1b1a12b0f136628ef93a8be92443",
+ "type": "github"
+ },
+ "original": {
+ "owner": "juspay",
+ "repo": "services-flake",
+ "type": "github"
}
},
"systems": {
diff --git a/flake.nix b/flake.nix
index c970d8f..543c318 100644
--- a/flake.nix
+++ b/flake.nix
@@ -8,251 +8,245 @@
url = "github:nix-community/poetry2nix";
inputs.nixpkgs.follows = "nixpkgs";
};
+ process-compose-flake.url = "github:Platonic-Systems/process-compose-flake";
+ services-flake.url = "github:juspay/services-flake";
};
- outputs = { self, nixpkgs, flake-parts, poetry2nix }@inputs:
+ outputs = { self, nixpkgs, flake-parts, poetry2nix, process-compose-flake, services-flake }@inputs:
flake-parts.lib.mkFlake { inherit inputs; } {
- imports = [ ];
- systems = [ "x86_64-linux" "aarch64-darwin" ];
- perSystem = { pkgs, system, ... }:
- let
- pkgs = import nixpkgs {
- inherit system;
- overlays = [
- inputs.poetry2nix.overlays.default
- (final: prev: {
- toposPoetryEnv = final.callPackage toposPoetryEnv { };
- pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
- (python-final: python-prev: {
- pystray = python-final.callPackage ./overlays/pystray/default.nix { };
- })
+ imports = [ inputs.process-compose-flake.flakeModule ];
+ systems = [ "x86_64-linux" "aarch64-darwin" ];
+ perSystem = { self', pkgs, system, lib, ... }:
+ let
+ pkgs = import nixpkgs {
+ inherit system;
+ overlays = [
+ inputs.poetry2nix.overlays.default
+ (final: prev: {
+ toposPoetryEnv = final.callPackage toposPoetryEnv { };
+ pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
+ (python-final: python-prev: {
+ pystray = python-final.callPackage ./overlays/pystray/default.nix { };
+ })
+ ];
+ })
];
- })
- ];
- };
- # see https://github.com/nix-community/poetry2nix/tree/master#api for more functions and examples.
- #TODO: Figure out how to add setuptools to all the packages which need it, this is currently not working as expected.
- overrides = pkgs.poetry2nix.overrides.withDefaults (final: super:
- pkgs.lib.mapAttrs
- (attr: systems: super.${attr}.overridePythonAttrs
- (old: {
- nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: final.${a}) systems;
- }))
- {
- # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
- package = [ "setuptools" ];
- }
- );
- toposPoetryEnv = pkgs.poetry2nix.mkPoetryEnv {
- projectDir = self;
- preferWheels = true;
- inherit overrides;
- };
-
- envFile = pkgs.writeText "env_dev" (builtins.readFile ./.env_dev);
- configFile = pkgs.copyPathToStore ./config.yaml;
- yq = pkgs.yq-go;
-
- # Note: This only loads the settings from the repos config file
- # if one is not already set in the user's .config directory.
- toposSetupHook = ''
- export TOPOS_CONFIG_PATH="$HOME/.config/topos/config.yaml"
- mkdir -p "$(dirname "$TOPOS_CONFIG_PATH")"
- if [ ! -f "$TOPOS_CONFIG_PATH" ]; then
- echo "Creating new config file at $TOPOS_CONFIG_PATH"
- echo "# Topos Configuration" > "$TOPOS_CONFIG_PATH"
- ${yq}/bin/yq eval ${configFile} | while IFS= read -r line; do
- echo "$line" >> "$TOPOS_CONFIG_PATH"
- done
- echo "Config file created at $TOPOS_CONFIG_PATH"
- else
- echo "Config file already exists at $TOPOS_CONFIG_PATH"
- fi
- '';
-
- postgresSetupHook = ''
- # SETUP POSTGRES SERVER
- echo "Loading environment variables from Nix store"
- export $(cat ${envFile} | xargs)
-
- # Define PGDATA and LOGFILE based on environment variables
- export PGDATA=$(pwd)/pgdata
- LOGFILE=$PGDATA/postgresql.log
-
- echo "Initializing PostgreSQL data directory at $PGDATA"
-
- echo "PGDATA: $PGDATA"
- if [ ! -d "$PGDATA" ]; then
- mkdir -p "$PGDATA"
- initdb -D "$PGDATA"
- fi
-
- echo "Stopping any existing PostgreSQL server..."
- pg_ctl -D "$PGDATA" stop -s -m fast || echo "No existing server to stop."
+ };
- echo "Starting PostgreSQL server..."
- pg_ctl -D "$PGDATA" -l $LOGFILE start -w
+ # see https://github.com/nix-community/poetry2nix/tree/master#api for more functions and examples.
+ #TODO: Figure out how to add setuptools to all the packages which need it, this is currently not working as expected.
+ overrides = pkgs.poetry2nix.overrides.withDefaults (final: super:
+ pkgs.lib.mapAttrs
+ (attr: systems: super.${attr}.overridePythonAttrs
+ (old: {
+ nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: final.${a}) systems;
+ }))
+ {
+ # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagename
+ package = [ "setuptools" ];
+ }
+ );
+ toposPoetryEnv = pkgs.poetry2nix.mkPoetryEnv {
+ projectDir = self;
+ preferWheels = true;
+ inherit overrides;
+ };
- # Wait for PostgreSQL to start
- for i in {1..10}; do
- if pg_isready -q; then
- break
+ envFile = pkgs.writeText "env_dev" (builtins.readFile ./.env_dev);
+ parseEnvFile = envFile:
+ let
+ content = builtins.readFile envFile;
+ lines = lib.filter (l: l != "" && !lib.hasPrefix "#" l) (lib.splitString "\n" content);
+ parseLine = l:
+ let
+ parts = lib.splitString "=" l;
+ in
+ { name = lib.head parts; value = lib.concatStringsSep "=" (lib.tail parts); };
+ in
+ builtins.listToAttrs (map parseLine lines);
+ envVars = parseEnvFile ./.env_dev;
+
+ configFile = pkgs.copyPathToStore ./config.yaml;
+ yq = pkgs.yq-go;
+
+ # Note: This only loads the settings from the repos config file
+ # if one is not already set in the user's .config directory.
+ toposSetupHook = ''
+ export $(cat ${envFile} | xargs)
+ export TOPOS_CONFIG_PATH="$HOME/.config/topos/config.yaml"
+ mkdir -p "$(dirname "$TOPOS_CONFIG_PATH")"
+ if [ ! -f "$TOPOS_CONFIG_PATH" ]; then
+ echo "Creating new config file at $TOPOS_CONFIG_PATH"
+ echo "# Topos Configuration" > "$TOPOS_CONFIG_PATH"
+ ${yq}/bin/yq eval ${configFile} | while IFS= read -r line; do
+ echo "$line" >> "$TOPOS_CONFIG_PATH"
+ done
+ echo "Config file created at $TOPOS_CONFIG_PATH"
+ else
+ echo "Config file already exists at $TOPOS_CONFIG_PATH"
fi
- echo "Waiting for PostgreSQL to start..."
- sleep 1
- done
-
- if ! pg_isready -q; then
- echo "Failed to start PostgreSQL. Check the logs at $LOGFILE"
- exit 1
- fi
-
- # Create the database if it doesn't exist
- if ! psql -lqt | cut -d \| -f 1 | grep -qw "$POSTGRES_DB"; then
- createdb "$POSTGRES_DB"
- fi
-
- # Create the user if they don't exist
- if ! psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='$POSTGRES_USER'" | grep -q 1; then
- createuser -s "$POSTGRES_USER"
- fi
-
- # Set up the test database, role, and tables
- psql -v ON_ERROR_STOP=1 -d $POSTGRES_DB <
Date: Tue, 15 Oct 2024 07:46:38 -0400
Subject: [PATCH 12/18] feat: Enhance Kafka setup and improve service
dependencies
- Replace manual Kafka setup with services-flake's apache-kafka service
- Add Zookeeper service as a dependency for Kafka
- Introduce kafkaPreStartup script to create the chat_topic
- Update PostgreSQL service name from "pg1" to "pg" for consistency
- Refine service dependencies:
- Kafka now depends on Zookeeper
- Topos depends on both PostgreSQL and Kafka
- Remove redundant kafkaSetupHook
- Adjust Kafka settings for simplified configuration
- Include JVM options for Kafka to set heap size
These changes streamline the service setup process and ensure proper
initialization order, improving the overall reliability and maintainability
of the development environment.
---
flake.nix | 90 ++++++++++++++++++++++++++++---------------------------
1 file changed, 46 insertions(+), 44 deletions(-)
diff --git a/flake.nix b/flake.nix
index 543c318..d8c95ab 100644
--- a/flake.nix
+++ b/flake.nix
@@ -69,6 +69,11 @@
configFile = pkgs.copyPathToStore ./config.yaml;
yq = pkgs.yq-go;
+ kafkaPreStartup = ''
+ echo "Kafka is ready. Creating topic..."
+ ${pkgs.apacheKafka}/bin/kafka-topics.sh --create --topic chat_topic --bootstrap-server localhost:9092 --partitions 1 --replication-factor 1 --if-not-exists
+ '';
+
# Note: This only loads the settings from the repos config file
# if one is not already set in the user's .config directory.
toposSetupHook = ''
@@ -85,48 +90,7 @@
else
echo "Config file already exists at $TOPOS_CONFIG_PATH"
fi
- '';
-
- kafkaSetupHook = ''
- echo "Starting Kafka in Kraft mode..."
-
- # Set up necessary environment variables
- export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M"
- export KAFKA_KRAFT_MODE=true
- echo ${pkgs.apacheKafka}
-
- # Prepare a default config for Kraft mode
- if [ ! -f ./kafka.properties ]; then
- echo "Initializing Kafka Kraft mode..."
-
- # Server 1 Kraft
- cp ${pkgs.apacheKafka}/config/kraft/server.properties ./server-1.properties
- sudo sed -i '57!s/PLAINTEXT/MQ/g' server-1.properties
- sudo sed -i '30s/.*/controller.quorum.voters=1@localhost:9091/' server-1.properties
- sudo sed -i '78s|.*|log.dirs=/tmp/kraft-combined-logs/server-1|' server-1.properties
- sudo sed -i '27s|.*|node.id=1|' server-1.properties
- sudo sed -i '42s|.*|listeners=MQ://:9092,CONTROLLER://:9091|' server-1.properties
- sudo sed -i '92s|.*|offsets.topic.replication.factor=1|' server-1.properties
- sudo sed -i '57s|.*|listener.security.protocol.map=CONTROLLER:PLAINTEXT,MQ:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL|' server-1.properties
-
- fi
-
- # Step 1
- KAFKA_CLUSTER_ID="$(${pkgs.apacheKafka}/bin/kafka-storage.sh random-uuid)"
-
- # Step 2
- ${pkgs.apacheKafka}/bin/kafka-storage.sh format -t $KAFKA_CLUSTER_ID -c ./server-1.properties
-
- # Step 3
- ${pkgs.apacheKafka}/bin/kafka-server-start.sh ./server-1.properties &
-
- # Step 4
- echo "Kafka environment is ready to use and running in detached terminals."
-
- # Step 5
- ${pkgs.apacheKafka}/bin/kafka-topics.sh --create --topic chat_topic --bootstrap-server localhost:9092 --partitions 1 --replication-factor 1
-
- sleep 3
+ ${kafkaPreStartup}
'';
in
@@ -156,7 +120,7 @@
models = [ "phi3" ];
};
- postgres."pg1" = {
+ postgres."pg" = {
enable = true;
package = pkgs.postgresql_16.withPackages (p: [ p.pgvector ]);
port = 5432;
@@ -203,10 +167,48 @@
};
};
+ zookeeper."zookeeper".enable = true;
+
+ apache-kafka."kafka" = {
+ enable = true;
+ port = 9092;
+ settings = {
+ "offsets.topic.replication.factor" = 1;
+ "zookeeper.connect" = [ "localhost:2181" ];
+ };
+ # settings = {
+ # "broker.id" = 1;
+ # "log.dirs" = [ "/tmp/kraft-combined-logs/server-1" ];
+ # "listeners" = [ "PLAINTEXT://localhost:9092" "CONTROLLER://localhost:9091" ];
+ # "listener.security.protocol.map" = "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT";
+ # "advertised.listeners" = "PLAINTEXT://localhost:9092";
+ # "controller.quorum.voters" = "1@localhost:9091";
+ # "controller.listener.names" = "CONTROLLER";
+ # "process.roles" = "broker,controller";
+ # "node.id" = 1;
+ # "offsets.topic.replication.factor" = 1;
+ # "transaction.state.log.replication.factor" = 1;
+ # "transaction.state.log.min.isr" = 1;
+ # "auto.create.topics.enable" = true;
+ # "num.partitions" = 1;
+ # };
+ # clusterId = "$(${pkgs.apacheKafka}/bin/kafka-storage.sh random-uuid)";
+ formatLogDirs = true;
+ formatLogDirsIgnoreFormatted = true;
+ jvmOptions = [
+ "-Xmx512M"
+ "-Xms512M"
+ ];
+ };
+
topos.enable = true;
topos.args = [ "run" ];
};
- settings.processes.topos.depends_on.pg1.condition = "process_healthy";
+ settings.processes = {
+ kafka.depends_on."zookeeper".condition = "process_healthy";
+ topos.depends_on.pg.condition = "process_healthy";
+ topos.depends_on.kafka.condition = "process_healthy";
+ };
};
packages = rec {
From f0639a73a613c5011168b59604cda26f00ac00f7 Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 10:25:28 -0400
Subject: [PATCH 13/18] refactor: Clean up flake.nix and reorganize Nix-related
files
- Update project description to "topos-cli"
- Reorganize overlays and services into a dedicated 'nix' directory
- Simplify service configurations:
- Remove commented-out Kafka settings
- Standardize service naming (e.g., "ollama" instead of "ollama1")
- Update TOPOS_CONFIG_PATH to use ~/.topos instead of ~/.config/topos
- Remove unused tkinter overlay
- Adjust data directory paths for consistency
- Clean up comments and whitespace
These changes improve the maintainability of our Nix configuration and
provide a cleaner structure for future development.
---
flake.nix | 35 ++++++------------
{overlays => nix/overlays}/pystray/build.nix | 0
.../overlays}/pystray/default.nix | 0
.../services/topos-service.nix | 0
overlays/tkinter/default.nix | 36 -------------------
5 files changed, 10 insertions(+), 61 deletions(-)
rename {overlays => nix/overlays}/pystray/build.nix (100%)
rename {overlays => nix/overlays}/pystray/default.nix (100%)
rename topos-service.nix => nix/services/topos-service.nix (100%)
delete mode 100644 overlays/tkinter/default.nix
diff --git a/flake.nix b/flake.nix
index d8c95ab..ae86be5 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,5 +1,5 @@
{
- description = "topos";
+ description = "topos-cli";
inputs = {
flake-parts.url = "github:hercules-ci/flake-parts";
@@ -26,7 +26,7 @@
toposPoetryEnv = final.callPackage toposPoetryEnv { };
pythonPackagesExtensions = prev.pythonPackagesExtensions ++ [
(python-final: python-prev: {
- pystray = python-final.callPackage ./overlays/pystray/default.nix { };
+ pystray = python-final.callPackage ./nix/overlays/pystray/default.nix { };
})
];
})
@@ -78,7 +78,7 @@
# if one is not already set in the user's .config directory.
toposSetupHook = ''
export $(cat ${envFile} | xargs)
- export TOPOS_CONFIG_PATH="$HOME/.config/topos/config.yaml"
+ export TOPOS_CONFIG_PATH="$HOME/.topos/config.yaml"
mkdir -p "$(dirname "$TOPOS_CONFIG_PATH")"
if [ ! -f "$TOPOS_CONFIG_PATH" ]; then
echo "Creating new config file at $TOPOS_CONFIG_PATH"
@@ -98,18 +98,19 @@
process-compose."services-flake-topos" = { config, ... }: {
imports = [
inputs.services-flake.processComposeModules.default
- (import ./topos-service.nix { inherit pkgs lib config; topos = self'.packages.topos; })
+ (import ./nix/services/topos-service.nix { inherit pkgs lib config; topos = self'.packages.topos; })
];
- services = let dataDirBase = "$HOME/.services-flake/llm"; in {
+ services = let dataDirBase = "$HOME/.topos"; in {
# Backend service to perform inference on LLM models
- ollama."ollama1" = {
+ ollama."ollama" = {
enable = true;
# The models are usually huge, downloading them in every project
# directory can lead to a lot of duplication. Change here to a
# directory where the Ollama models can be stored and shared across
# projects.
- dataDir = "${dataDirBase}/ollama1";
+
+ # dataDir = "${dataDirBase}/ollama";
# Define the models to download when our app starts
#
@@ -125,7 +126,7 @@
package = pkgs.postgresql_16.withPackages (p: [ p.pgvector ]);
port = 5432;
listen_addresses = "127.0.0.1";
-
+ # dataDir = "${dataDirBase}/pg";
initialDatabases = [
{ name = "${envVars.POSTGRES_DB}"; }
];
@@ -172,27 +173,11 @@
apache-kafka."kafka" = {
enable = true;
port = 9092;
+ # dataDir = "${dataDirBase}/kafka";
settings = {
"offsets.topic.replication.factor" = 1;
"zookeeper.connect" = [ "localhost:2181" ];
};
- # settings = {
- # "broker.id" = 1;
- # "log.dirs" = [ "/tmp/kraft-combined-logs/server-1" ];
- # "listeners" = [ "PLAINTEXT://localhost:9092" "CONTROLLER://localhost:9091" ];
- # "listener.security.protocol.map" = "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT";
- # "advertised.listeners" = "PLAINTEXT://localhost:9092";
- # "controller.quorum.voters" = "1@localhost:9091";
- # "controller.listener.names" = "CONTROLLER";
- # "process.roles" = "broker,controller";
- # "node.id" = 1;
- # "offsets.topic.replication.factor" = 1;
- # "transaction.state.log.replication.factor" = 1;
- # "transaction.state.log.min.isr" = 1;
- # "auto.create.topics.enable" = true;
- # "num.partitions" = 1;
- # };
- # clusterId = "$(${pkgs.apacheKafka}/bin/kafka-storage.sh random-uuid)";
formatLogDirs = true;
formatLogDirsIgnoreFormatted = true;
jvmOptions = [
diff --git a/overlays/pystray/build.nix b/nix/overlays/pystray/build.nix
similarity index 100%
rename from overlays/pystray/build.nix
rename to nix/overlays/pystray/build.nix
diff --git a/overlays/pystray/default.nix b/nix/overlays/pystray/default.nix
similarity index 100%
rename from overlays/pystray/default.nix
rename to nix/overlays/pystray/default.nix
diff --git a/topos-service.nix b/nix/services/topos-service.nix
similarity index 100%
rename from topos-service.nix
rename to nix/services/topos-service.nix
diff --git a/overlays/tkinter/default.nix b/overlays/tkinter/default.nix
deleted file mode 100644
index a473a2f..0000000
--- a/overlays/tkinter/default.nix
+++ /dev/null
@@ -1,36 +0,0 @@
-{ lib
-, stdenv
-, buildPythonPackage
-, python
-, isPyPy
-}:
-
-buildPythonPackage {
- pname = "tkinter";
- version = python.version;
- format = "other";
-
- disabled = isPyPy;
-
- installPhase =
- ''
- # Move the tkinter module
- mkdir -p $out/${python.sitePackages}
- mv lib/${python.libPrefix}/lib-dynload/_tkinter* $out/${python.sitePackages}/
- ''
- + lib.optionalString (!stdenv.isDarwin) ''
- # Update the rpath to point to python without x11Support
- old_rpath=$(patchelf --print-rpath $out/${python.sitePackages}/_tkinter*)
- new_rpath=$(sed "s#${python}#${python}#g" <<< "$old_rpath" )
- patchelf --set-rpath $new_rpath $out/${python.sitePackages}/_tkinter*
- '';
-
- meta = python.meta // {
- description = "The standard Python interface to the Tcl/Tk GUI toolkit";
- longDescription = ''
- The tkinter package ("Tk interface") is the standard Python interface to
- the Tcl/Tk GUI toolkit. Both Tk and tkinter are available on most Unix
- platforms, including macOS, as well as on Windows systems.
- '';
- };
-}
From 170d35ccec531a4e711618d76ed802b67ba4ec37 Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 10:39:13 -0400
Subject: [PATCH 14/18] docs: Update README and justfile for improved Nix usage
- README.md:
- Add instructions for running Topos and its dependencies using `nix run`
- Reorganize setup instructions into "Production" and "Development" sections
- Remove outdated instructions for setting up Spacy models
- Remove requirement for python-tk
- Update development shell instructions
- justfile:
- Update 'run' recipe to use `nix run .` instead of `topos run`
These changes simplify the setup process for new users and developers,
leveraging Nix to manage dependencies and provide a consistent environment.
The updated documentation reflects our shift towards a more Nix-centric
workflow, making it easier for contributors to get started with the project.
---
README.md | 28 +++++++++++++++-------------
justfile | 6 +++---
2 files changed, 18 insertions(+), 16 deletions(-)
diff --git a/README.md b/README.md
index 99ffad8..f34044a 100644
--- a/README.md
+++ b/README.md
@@ -22,12 +22,21 @@ If nix is not installed:
1. Install nix:
macos/linux: `sh <(curl -L https://nixos.org/nix/install)`
windows: `sh <(curl -L https://nixos.org/nix/install) --daemon`
-Run the software with nix:
-2. Download this repo `git clone https://github.com/jonnyjohnson1/topos-cli`
-3. `cd topos-cli`
-3. ~~build the backend service (only run the topos set --spacy trf line if it is your first time setting up)~~
+2. Run Topos and all its dependencies:
+ ```
+ nix run github:jonnyjohnson1/topos-cli
+ ```
+ This will start all services including Topos, Postgres, Kafka, and Ollama.
+
+## Development
+Clone the repository:
+```
+git clone https://github.com/jonnyjohnson1/topos-cli
+cd topos-cli
+```
-### Production
+For development, you have several options:
+### Build Binary
First build topos binary (only usable on machines with nix installed)
```
nix build .#topos
@@ -40,12 +49,7 @@ run built binary
### Dev Shell
```
nix develop
-run topos
-```
-
-### Dev Shell (auto start server)
-```
-nix develop .#topos
+topos run
```
### Poetry Shell
@@ -56,7 +60,6 @@ nix develop .#poetry
## Install Instructions
requires `brew install just`
requires `brew install poetry`
-requires `brew install python-tk`
## Graph Database - Install Neo4j
@@ -70,7 +73,6 @@ brew services start neo4j
install the topos package with the command `just build`
### Step 2: Set the Spacy Model Size
-**BROKEN**
Set the size of the spacy model you wish to use on your system.
There are 'small', 'med', 'large', and 'trf'.
diff --git a/justfile b/justfile
index b7b5845..effe003 100644
--- a/justfile
+++ b/justfile
@@ -1,9 +1,9 @@
build:
poetry build
pip install .
-
+
run:
- topos run
+ nix run .
zrok:
zrok share public http://0.0.0.0:13341
@@ -21,4 +21,4 @@ python:
create-dmg 'dist/main' --overwrite
dmg:
- create-dmg topos.app --volicon "topos/assets/topos_blk_rounded.png" --icon "topos/assets/topos_blk_rounded.png"
\ No newline at end of file
+ create-dmg topos.app --volicon "topos/assets/topos_blk_rounded.png" --icon "topos/assets/topos_blk_rounded.png"
From 99a7659c552364d4cb77fc3534df6b4b6e94b5e7 Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 14:28:42 -0400
Subject: [PATCH 15/18] feat: Update Ollama model to dolphin-llama3
- Replace "phi3" with "dolphin-llama3" in the Ollama service configuration
---
flake.nix | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/flake.nix b/flake.nix
index ae86be5..611c073 100644
--- a/flake.nix
+++ b/flake.nix
@@ -118,7 +118,7 @@
# models manually in the UI.
#
# Search for the models here: https://ollama.com/library
- models = [ "phi3" ];
+ models = [ "dolphin-llama3" ];
};
postgres."pg" = {
From 7520e54c3ce3f755a4f49ba399c3872169cee83d Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 08:20:11 -0400
Subject: [PATCH 16/18] feat: Implement group management and missed message
functionality (WIP)
- Add new tables in PostgreSQL for groups, users, and user-group associations
- Implement GroupManagementService and GroupManagerSQLite for managing users and groups
- Add MissedMessageManager and MissedMessageService for handling missed messages
- Update utilities with new helper functions
The following files were copied from Jonny's kafka-chat-server branch
(commit: d4179da733421670d401468d3cb4390b28eb8e71):
- topos/services/messages/group_management_service.py
- topos/services/messages/group_manager.py
- topos/services/messages/missed_message_manager.py
- topos/services/messages/missed_message_service.py
These additions lay the groundwork for implementing group-based messaging
and ensuring users can retrieve messages they missed while offline.
---
flake.nix | 22 ++
.../messages/group_management_service.py | 49 ++++
topos/services/messages/group_manager.py | 247 ++++++++++++++++++
.../messages/missed_message_manager.py | 64 +++++
.../messages/missed_message_service.py | 22 ++
topos/utilities/utils.py | 3 +
6 files changed, 407 insertions(+)
create mode 100644 topos/services/messages/group_management_service.py
create mode 100644 topos/services/messages/group_manager.py
create mode 100644 topos/services/messages/missed_message_manager.py
create mode 100644 topos/services/messages/missed_message_service.py
diff --git a/flake.nix b/flake.nix
index 611c073..4f8150f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -154,6 +154,28 @@
PRIMARY KEY (source_id, relation_type, target_id)
);
+ CREATE TABLE IF NOT EXISTS groups (
+ group_id TEXT PRIMARY KEY,
+ group_name TEXT NOT NULL UNIQUE
+ );
+
+ CREATE TABLE IF NOT EXISTS users (
+ user_id TEXT PRIMARY KEY,
+ username TEXT NOT NULL UNIQUE,
+ last_seen_online TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+ );
+
+ CREATE TABLE IF NOT EXISTS user_groups (
+ user_id TEXT,
+ group_id TEXT,
+ FOREIGN KEY (user_id) REFERENCES users (user_id),
+ FOREIGN KEY (group_id) REFERENCES groups (group_id),
+ PRIMARY KEY (user_id, group_id)
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_user_groups_user_id ON user_groups (user_id);
+ CREATE INDEX IF NOT EXISTS idx_user_groups_group_id ON user_groups (group_id);
+
GRANT ALL PRIVILEGES ON DATABASE ${envVars.POSTGRES_DB} TO ${envVars.POSTGRES_USER};
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO ${envVars.POSTGRES_USER};
GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO ${envVars.POSTGRES_USER};
diff --git a/topos/services/messages/group_management_service.py b/topos/services/messages/group_management_service.py
new file mode 100644
index 0000000..75ff269
--- /dev/null
+++ b/topos/services/messages/group_management_service.py
@@ -0,0 +1,49 @@
+from typing import List, Optional
+from topos.services.messages.group_manager import GroupManagerSQLite
+
+class GroupManagementService:
+ def __init__(self) -> None:
+ self.group_manager = GroupManagerSQLite() # this implementation can be swapped for oother implementations out based on env var, use if statements
+ # any other house keeping can be done here too
+
+ def create_group(self, group_name: str) -> str:
+ return self.group_manager.create_group(group_name=group_name)
+
+ def create_user(self, user_id:str,username: str) -> str:
+ return self.group_manager.create_user(user_id,username)
+
+ def add_user_to_group(self, user_id: str, group_id: str) -> bool:
+ return self.group_manager.add_user_to_group(user_id=user_id,group_id=group_id)
+
+ def remove_user_from_group(self, user_id: str, group_id: str) -> bool:
+ return self.group_manager.remove_user_from_group(user_id=user_id,group_id=group_id)
+
+ def get_user_groups(self, user_id: str) -> List[dict]:
+ return self.group_manager.get_user_groups(user_id)
+
+ def get_group_users(self, group_id: str) -> List[dict]:
+ return self.group_manager.get_group_users(group_id)
+
+ def get_group_by_id(self, group_id: str) -> Optional[dict]:
+ return self.group_manager.get_group_by_id(group_id)
+
+ def get_user_by_id(self, user_id: str) -> Optional[dict]:
+ return self.group_manager.get_user_by_id(user_id)
+
+ def get_group_by_name(self, group_name: str) -> Optional[dict]:
+ return self.group_manager.get_group_by_name(group_name)
+
+ def get_user_by_username(self, username: str) -> Optional[dict]:
+ return self.get_user_by_username(username)
+
+ def delete_group(self, group_id: str) -> bool:
+ return self.group_manager.delete_group(group_id)
+
+ def delete_user(self, user_id: str) -> bool:
+ return self.group_manager.delete_user(user_id)
+
+ def set_user_last_seen_online(self,user_id:str)-> bool:
+ return self.group_manager.set_user_last_seen_online(user_id)
+
+ def get_user_last_seen_online(self,user_id:str)-> bool:
+ return self.group_manager.get_user_last_seen_online(user_id)
diff --git a/topos/services/messages/group_manager.py b/topos/services/messages/group_manager.py
new file mode 100644
index 0000000..468f7f5
--- /dev/null
+++ b/topos/services/messages/group_manager.py
@@ -0,0 +1,247 @@
+from datetime import datetime
+import sqlite3
+import uuid
+from typing import List, Optional, Dict
+from topos.utilities.utils import generate_deci_code
+
+class GroupManagerSQLite:
+ def __init__(self, db_file: str = '../db/user.db'):
+ self.db_file = db_file
+
+ # Initialize empty caches
+ self.groups_cache: Dict[str, Dict] = {} # group_id -> group_info
+ self.users_cache: Dict[str, Dict] = {} # user_id -> user_info
+ self.user_groups_cache: Dict[str, List[str]] = {} # user_id -> list of group_ids
+ self.group_users_cache: Dict[str, List[str]] = {} # group_id -> list of user_ids
+
+ def _get_group_from_db(self, group_id: str) -> Optional[Dict]:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT group_id, group_name FROM groups WHERE group_id = ?', (group_id,))
+ result = cursor.fetchone()
+ print(result)
+ if result:
+ return {"group_id": result[0], "group_name": result[1]}
+ return None
+
+ def _get_user_from_db(self, user_id: str) -> Optional[Dict]:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT user_id, username FROM users WHERE user_id = ?', (user_id,))
+ result = cursor.fetchone()
+ if result:
+ return {"user_id": result[0], "username": result[1]}
+ return None
+
+ def _get_user_groups_from_db(self, user_id: str) -> List[str]:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT group_id FROM user_groups WHERE user_id = ?', (user_id,))
+ return [row[0] for row in cursor.fetchall()]
+
+ def _get_group_users_from_db(self, group_id: str) -> List[str]:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT user_id FROM user_groups WHERE group_id = ?', (group_id,))
+ return [row[0] for row in cursor.fetchall()]
+
+ def create_group(self, group_name: str) -> str:
+ group_id = generate_deci_code(6)
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('INSERT INTO groups (group_id, group_name) VALUES (?, ?)', (group_id, group_name))
+ conn.commit()
+
+ # Update cache
+ self.groups_cache[group_id] = {"group_id": group_id, "group_name": group_name}
+ self.group_users_cache[group_id] = []
+
+ return group_id
+
+ def create_user(self, user_id:str,username: str,) -> str:
+
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('INSERT INTO users (user_id, username) VALUES (?, ?)', (user_id, username))
+ conn.commit()
+
+ # Update cache
+ self.users_cache[user_id] = {"user_id": user_id, "username": username}
+ self.user_groups_cache[user_id] = []
+
+ return user_id
+
+ def add_user_to_group(self, user_id: str, group_id: str) -> bool:
+ try:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('INSERT INTO user_groups (user_id, group_id) VALUES (?, ?)', (user_id, group_id))
+ conn.commit()
+
+ # Update cache if the entries exist
+ if user_id in self.user_groups_cache:
+ self.user_groups_cache[user_id].append(group_id)
+ if group_id in self.group_users_cache:
+ self.group_users_cache[group_id].append(user_id)
+
+ return True
+ except sqlite3.IntegrityError:
+ return False # User already in group or user/group doesn't exist
+
+ def remove_user_from_group(self, user_id: str, group_id: str) -> bool:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('DELETE FROM user_groups WHERE user_id = ? AND group_id = ?', (user_id, group_id))
+ conn.commit()
+ if cursor.rowcount > 0:
+ # Update cache if the entries exist
+ if user_id in self.user_groups_cache:
+ self.user_groups_cache[user_id].remove(group_id)
+ if group_id in self.group_users_cache:
+ self.group_users_cache[group_id].remove(user_id)
+ return True
+ return False
+
+ def get_user_groups(self, user_id: str) -> List[dict]:
+ if user_id not in self.user_groups_cache:
+ self.user_groups_cache[user_id] = self._get_user_groups_from_db(user_id)
+
+ return [self.get_group_by_id(group_id) for group_id in self.user_groups_cache[user_id]]
+
+ def get_group_users(self, group_id: str) -> List[dict]:
+ if group_id not in self.group_users_cache:
+ self.group_users_cache[group_id] = self._get_group_users_from_db(group_id)
+
+ return [self.get_user_by_id(user_id) for user_id in self.group_users_cache[group_id]]
+
+ def get_group_by_id(self, group_id: str) -> Optional[dict]:
+ if group_id not in self.groups_cache:
+ group = self._get_group_from_db(group_id)
+ if group:
+ self.groups_cache[group_id] = group
+ else:
+ return None
+ return self.groups_cache[group_id]
+
+ def get_user_by_id(self, user_id: str) -> Optional[dict]:
+ if user_id not in self.users_cache:
+ user = self._get_user_from_db(user_id)
+ if user:
+ self.users_cache[user_id] = user
+ else:
+ return None
+ return self.users_cache[user_id]
+
+ def get_group_by_name(self, group_name: str) -> Optional[dict]:
+ # This operation requires a full DB scan if not in cache
+ for group in self.groups_cache.values():
+ if group['group_name'] == group_name:
+ return group
+
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT group_id, group_name FROM groups WHERE group_name = ?', (group_name,))
+ result = cursor.fetchone()
+ if result:
+ group = {"group_id": result[0], "group_name": result[1]}
+ self.groups_cache[group['group_id']] = group
+ return group
+ return None
+
+ def get_user_by_username(self, username: str) -> Optional[dict]:
+ # This operation requires a full DB scan if not in cache
+ for user in self.users_cache.values():
+ if user['username'] == username:
+ return user
+
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('SELECT user_id, username FROM users WHERE username = ?', (username,))
+ result = cursor.fetchone()
+ if result:
+ user = {"user_id": result[0], "username": result[1]}
+ self.users_cache[user['user_id']] = user
+ return user
+ return None
+
+ def delete_group(self, group_id: str) -> bool:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('DELETE FROM user_groups WHERE group_id = ?', (group_id,))
+ cursor.execute('DELETE FROM groups WHERE group_id = ?', (group_id,))
+ conn.commit()
+ if cursor.rowcount > 0:
+ # Update cache
+ self.groups_cache.pop(group_id, None)
+ self.group_users_cache.pop(group_id, None)
+ for user_groups in self.user_groups_cache.values():
+ if group_id in user_groups:
+ user_groups.remove(group_id)
+ return True
+ return False
+
+ def delete_user(self, user_id: str) -> bool:
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ cursor.execute('DELETE FROM user_groups WHERE user_id = ?', (user_id,))
+ cursor.execute('DELETE FROM users WHERE user_id = ?', (user_id,))
+ conn.commit()
+ if cursor.rowcount > 0:
+ # Update cache
+ self.users_cache.pop(user_id, None)
+ self.user_groups_cache.pop(user_id, None)
+ for group_users in self.group_users_cache.values():
+ if user_id in group_users:
+ group_users.remove(user_id)
+ return True
+ return False
+ def get_user_last_seen_online(self, user_id: str) -> str:
+ """
+ Get the last_seen_online timestamp for a given user_id.
+
+ :param user_id: The ID of the user
+ :return: The last seen timestamp as a string, or None if user not found
+ """
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ try:
+ cursor.execute('''
+ SELECT last_seen_online
+ FROM users
+ WHERE user_id = ?
+ ''', (user_id,))
+
+ result = cursor.fetchone()
+ if result:
+ return result[0]
+ else:
+ print(f"User with ID {user_id} not found.")
+ return None
+ except sqlite3.Error as e:
+ print(f"An error occurred: {e}")
+ return None
+
+ def set_user_last_seen_online(self, user_id: str) -> bool:
+ """
+ Set the last_seen_online timestamp for a given user_id to the current time.
+
+ :param user_id: The ID of the user
+ :return: True if successful, False if user not found
+ """
+ with sqlite3.connect(self.db_file) as conn:
+ cursor = conn.cursor()
+ try:
+ cursor.execute('''
+ UPDATE users
+ SET last_seen_online = ?
+ WHERE user_id = ?
+ ''', (datetime.now().replace(microsecond=0), user_id))
+
+ if cursor.rowcount == 0:
+ print(f"User with ID {user_id} not found.")
+ return False
+
+ return True
+ except sqlite3.Error as e:
+ print(f"An error occurred: {e}")
+ return False
diff --git a/topos/services/messages/missed_message_manager.py b/topos/services/messages/missed_message_manager.py
new file mode 100644
index 0000000..9da7a88
--- /dev/null
+++ b/topos/services/messages/missed_message_manager.py
@@ -0,0 +1,64 @@
+import asyncio
+import json
+from aiokafka import AIOKafkaConsumer, TopicPartition
+from typing import List, Set, Dict, Any
+
+KAFKA_BOOTSTRAP_SERVERS = 'localhost:9092'
+KAFKA_TOPIC = 'chat_topic'
+class MissedMessageManager:
+
+ async def get_filtered_missed_messages(self,
+ timestamp_ms: int,
+ key_filter: Set[str]
+ # max_messages: int = 1000
+ ) -> List[Dict[str, str]]:
+ consumer = AIOKafkaConsumer(
+ KAFKA_TOPIC,
+ bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS,
+ group_id=None, # Set to None to avoid committing offsets
+ auto_offset_reset='earliest'
+ )
+
+ try:
+ await consumer.start()
+
+ # Get partitions for the topic
+ partitions = consumer.partitions_for_topic(KAFKA_TOPIC)
+ if not partitions:
+ raise ValueError(f"Topic '{KAFKA_TOPIC}' not found")
+
+ # Create TopicPartition objects
+ tps = [TopicPartition(KAFKA_TOPIC, p) for p in partitions]
+
+ # Find offsets for the given timestamp
+ offsets = await consumer.offsets_for_times({tp: timestamp_ms for tp in tps})
+ print(offsets)
+ # Seek to the correct offset for each partition
+ for tp, offset_and_timestamp in offsets.items():
+ if offset_and_timestamp is None:
+ # If no offset found for the timestamp, seek to the end
+ consumer.seek_to_end(tp)
+ else:
+ print(tp)
+ print(offset_and_timestamp.offset)
+ consumer.seek(tp, offset_and_timestamp.offset)
+
+ # Collect filtered messages
+ missed_messages = []
+ while True:
+ try:
+ message = await asyncio.wait_for(consumer.getone(), timeout=1.0)
+ if message.key and message.key.decode() in key_filter:
+ missed_messages.append({
+ "key": message.key.decode(),
+ "value": json.loads(message.value.decode()),
+ "msg_type": "MISSED"
+ })
+ except asyncio.TimeoutError:
+ # No more messages within the timeout period
+ break
+
+ return missed_messages
+
+ finally:
+ await consumer.stop()
diff --git a/topos/services/messages/missed_message_service.py b/topos/services/messages/missed_message_service.py
new file mode 100644
index 0000000..69df6e4
--- /dev/null
+++ b/topos/services/messages/missed_message_service.py
@@ -0,0 +1,22 @@
+from topos.services.messages.missed_message_manager import MissedMessageManager
+from topos.services.messages.group_management_service import GroupManagementService
+from topos.utilities.utils import sqlite_timestamp_to_ms
+
+KAFKA_TOPIC = 'chat_topic'
+
+class MissedMessageService:
+ def __init__(self) -> None:
+ self.missed_message_manager = MissedMessageManager()
+ pass
+ # houskeeping if required
+ # if you need to inject the group management service here it could be an option ??
+
+ async def get_missed_messages(self,user_id :str ,group_management_service :GroupManagementService):
+ last_seen = group_management_service.get_user_last_seen_online(user_id=user_id)
+ if(last_seen):
+ users_groups = group_management_service.get_user_groups(user_id=user_id)
+ group_ids = [group["group_id"] for group in users_groups]
+ # get the last timestamp msg processed by the user
+ return await self.missed_message_manager.get_filtered_missed_messages(key_filter=group_ids,timestamp_ms=sqlite_timestamp_to_ms(last_seen))
+ else:
+ return []
diff --git a/topos/utilities/utils.py b/topos/utilities/utils.py
index ff1be6a..41ceca7 100644
--- a/topos/utilities/utils.py
+++ b/topos/utilities/utils.py
@@ -62,3 +62,6 @@ def generate_hex_code(n_digits):
def generate_deci_code(n_digits):
return ''.join(random.choice('0123456789') for _ in range(n_digits))
+
+def generate_group_name() -> str:
+ return 'GRP-'.join(random.choices(string.ascii_uppercase + string.digits, k=8))
From 358d3c982b8f3636e2751e733f486b9659dbd2d9 Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 08:50:11 -0400
Subject: [PATCH 17/18] refactor: Migrate group management from SQLite to
PostgreSQL (WIP)
- Rename GroupManagerSQLite to GroupManagerPostgres
- Update GroupManagementService to use PostgreSQL connection parameters
- Refactor database operations in GroupManagerPostgres to use psycopg2
- Remove in-memory caching in favor of direct database queries
- Update MissedMessageService to use the new GroupManagementService
- Improve timestamp handling in get_missed_messages
These changes align the group management system with our PostgreSQL database setup and improve overall consistency and reliability of the messaging system.
---
.../messages/group_management_service.py | 21 +-
topos/services/messages/group_manager.py | 296 +++++-------------
.../messages/missed_message_service.py | 25 +-
3 files changed, 104 insertions(+), 238 deletions(-)
diff --git a/topos/services/messages/group_management_service.py b/topos/services/messages/group_management_service.py
index 75ff269..d82f6dd 100644
--- a/topos/services/messages/group_management_service.py
+++ b/topos/services/messages/group_management_service.py
@@ -1,22 +1,21 @@
from typing import List, Optional
-from topos.services.messages.group_manager import GroupManagerSQLite
+from topos.services.messages.group_manager import GroupManagerPostgres
class GroupManagementService:
- def __init__(self) -> None:
- self.group_manager = GroupManagerSQLite() # this implementation can be swapped for oother implementations out based on env var, use if statements
- # any other house keeping can be done here too
+ def __init__(self, db_params: dict) -> None:
+ self.group_manager = GroupManagerPostgres(db_params)
def create_group(self, group_name: str) -> str:
return self.group_manager.create_group(group_name=group_name)
- def create_user(self, user_id:str,username: str) -> str:
- return self.group_manager.create_user(user_id,username)
+ def create_user(self, user_id: str, username: str) -> str:
+ return self.group_manager.create_user(user_id, username)
def add_user_to_group(self, user_id: str, group_id: str) -> bool:
- return self.group_manager.add_user_to_group(user_id=user_id,group_id=group_id)
+ return self.group_manager.add_user_to_group(user_id=user_id, group_id=group_id)
def remove_user_from_group(self, user_id: str, group_id: str) -> bool:
- return self.group_manager.remove_user_from_group(user_id=user_id,group_id=group_id)
+ return self.group_manager.remove_user_from_group(user_id=user_id, group_id=group_id)
def get_user_groups(self, user_id: str) -> List[dict]:
return self.group_manager.get_user_groups(user_id)
@@ -34,7 +33,7 @@ def get_group_by_name(self, group_name: str) -> Optional[dict]:
return self.group_manager.get_group_by_name(group_name)
def get_user_by_username(self, username: str) -> Optional[dict]:
- return self.get_user_by_username(username)
+ return self.group_manager.get_user_by_username(username)
def delete_group(self, group_id: str) -> bool:
return self.group_manager.delete_group(group_id)
@@ -42,8 +41,8 @@ def delete_group(self, group_id: str) -> bool:
def delete_user(self, user_id: str) -> bool:
return self.group_manager.delete_user(user_id)
- def set_user_last_seen_online(self,user_id:str)-> bool:
+ def set_user_last_seen_online(self, user_id: str) -> bool:
return self.group_manager.set_user_last_seen_online(user_id)
- def get_user_last_seen_online(self,user_id:str)-> bool:
+ def get_user_last_seen_online(self, user_id: str) -> Optional[str]:
return self.group_manager.get_user_last_seen_online(user_id)
diff --git a/topos/services/messages/group_manager.py b/topos/services/messages/group_manager.py
index 468f7f5..2020f02 100644
--- a/topos/services/messages/group_manager.py
+++ b/topos/services/messages/group_manager.py
@@ -1,247 +1,117 @@
+import psycopg2
+from psycopg2.extras import DictCursor
from datetime import datetime
-import sqlite3
-import uuid
from typing import List, Optional, Dict
from topos.utilities.utils import generate_deci_code
-class GroupManagerSQLite:
- def __init__(self, db_file: str = '../db/user.db'):
- self.db_file = db_file
-
- # Initialize empty caches
- self.groups_cache: Dict[str, Dict] = {} # group_id -> group_info
- self.users_cache: Dict[str, Dict] = {} # user_id -> user_info
- self.user_groups_cache: Dict[str, List[str]] = {} # user_id -> list of group_ids
- self.group_users_cache: Dict[str, List[str]] = {} # group_id -> list of user_ids
-
- def _get_group_from_db(self, group_id: str) -> Optional[Dict]:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT group_id, group_name FROM groups WHERE group_id = ?', (group_id,))
- result = cursor.fetchone()
- print(result)
- if result:
- return {"group_id": result[0], "group_name": result[1]}
- return None
-
- def _get_user_from_db(self, user_id: str) -> Optional[Dict]:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT user_id, username FROM users WHERE user_id = ?', (user_id,))
- result = cursor.fetchone()
- if result:
- return {"user_id": result[0], "username": result[1]}
- return None
-
- def _get_user_groups_from_db(self, user_id: str) -> List[str]:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT group_id FROM user_groups WHERE user_id = ?', (user_id,))
- return [row[0] for row in cursor.fetchall()]
-
- def _get_group_users_from_db(self, group_id: str) -> List[str]:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT user_id FROM user_groups WHERE group_id = ?', (group_id,))
- return [row[0] for row in cursor.fetchall()]
+class GroupManagerPostgres:
+ def __init__(self, db_params: Dict[str, str]):
+ self.db_params = db_params
+
+ def _get_connection(self):
+ return psycopg2.connect(**self.db_params)
def create_group(self, group_name: str) -> str:
group_id = generate_deci_code(6)
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('INSERT INTO groups (group_id, group_name) VALUES (?, ?)', (group_id, group_name))
- conn.commit()
-
- # Update cache
- self.groups_cache[group_id] = {"group_id": group_id, "group_name": group_name}
- self.group_users_cache[group_id] = []
-
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('INSERT INTO groups (group_id, group_name) VALUES (%s, %s)', (group_id, group_name))
return group_id
- def create_user(self, user_id:str,username: str,) -> str:
-
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('INSERT INTO users (user_id, username) VALUES (?, ?)', (user_id, username))
- conn.commit()
-
- # Update cache
- self.users_cache[user_id] = {"user_id": user_id, "username": username}
- self.user_groups_cache[user_id] = []
-
+ def create_user(self, user_id: str, username: str) -> str:
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('INSERT INTO users (user_id, username) VALUES (%s, %s)', (user_id, username))
return user_id
def add_user_to_group(self, user_id: str, group_id: str) -> bool:
try:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('INSERT INTO user_groups (user_id, group_id) VALUES (?, ?)', (user_id, group_id))
- conn.commit()
-
- # Update cache if the entries exist
- if user_id in self.user_groups_cache:
- self.user_groups_cache[user_id].append(group_id)
- if group_id in self.group_users_cache:
- self.group_users_cache[group_id].append(user_id)
-
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('INSERT INTO user_groups (user_id, group_id) VALUES (%s, %s)', (user_id, group_id))
return True
- except sqlite3.IntegrityError:
- return False # User already in group or user/group doesn't exist
+ except psycopg2.IntegrityError:
+ return False
def remove_user_from_group(self, user_id: str, group_id: str) -> bool:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('DELETE FROM user_groups WHERE user_id = ? AND group_id = ?', (user_id, group_id))
- conn.commit()
- if cursor.rowcount > 0:
- # Update cache if the entries exist
- if user_id in self.user_groups_cache:
- self.user_groups_cache[user_id].remove(group_id)
- if group_id in self.group_users_cache:
- self.group_users_cache[group_id].remove(user_id)
- return True
- return False
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('DELETE FROM user_groups WHERE user_id = %s AND group_id = %s', (user_id, group_id))
+ return cur.rowcount > 0
def get_user_groups(self, user_id: str) -> List[dict]:
- if user_id not in self.user_groups_cache:
- self.user_groups_cache[user_id] = self._get_user_groups_from_db(user_id)
-
- return [self.get_group_by_id(group_id) for group_id in self.user_groups_cache[user_id]]
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('''
+ SELECT g.group_id, g.group_name
+ FROM groups g
+ JOIN user_groups ug ON g.group_id = ug.group_id
+ WHERE ug.user_id = %s
+ ''', (user_id,))
+ return [dict(row) for row in cur.fetchall()]
def get_group_users(self, group_id: str) -> List[dict]:
- if group_id not in self.group_users_cache:
- self.group_users_cache[group_id] = self._get_group_users_from_db(group_id)
-
- return [self.get_user_by_id(user_id) for user_id in self.group_users_cache[group_id]]
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('''
+ SELECT u.user_id, u.username
+ FROM users u
+ JOIN user_groups ug ON u.user_id = ug.user_id
+ WHERE ug.group_id = %s
+ ''', (group_id,))
+ return [dict(row) for row in cur.fetchall()]
def get_group_by_id(self, group_id: str) -> Optional[dict]:
- if group_id not in self.groups_cache:
- group = self._get_group_from_db(group_id)
- if group:
- self.groups_cache[group_id] = group
- else:
- return None
- return self.groups_cache[group_id]
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('SELECT group_id, group_name FROM groups WHERE group_id = %s', (group_id,))
+ result = cur.fetchone()
+ return dict(result) if result else None
def get_user_by_id(self, user_id: str) -> Optional[dict]:
- if user_id not in self.users_cache:
- user = self._get_user_from_db(user_id)
- if user:
- self.users_cache[user_id] = user
- else:
- return None
- return self.users_cache[user_id]
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('SELECT user_id, username FROM users WHERE user_id = %s', (user_id,))
+ result = cur.fetchone()
+ return dict(result) if result else None
def get_group_by_name(self, group_name: str) -> Optional[dict]:
- # This operation requires a full DB scan if not in cache
- for group in self.groups_cache.values():
- if group['group_name'] == group_name:
- return group
-
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT group_id, group_name FROM groups WHERE group_name = ?', (group_name,))
- result = cursor.fetchone()
- if result:
- group = {"group_id": result[0], "group_name": result[1]}
- self.groups_cache[group['group_id']] = group
- return group
- return None
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('SELECT group_id, group_name FROM groups WHERE group_name = %s', (group_name,))
+ result = cur.fetchone()
+ return dict(result) if result else None
def get_user_by_username(self, username: str) -> Optional[dict]:
- # This operation requires a full DB scan if not in cache
- for user in self.users_cache.values():
- if user['username'] == username:
- return user
-
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('SELECT user_id, username FROM users WHERE username = ?', (username,))
- result = cursor.fetchone()
- if result:
- user = {"user_id": result[0], "username": result[1]}
- self.users_cache[user['user_id']] = user
- return user
- return None
+ with self._get_connection() as conn:
+ with conn.cursor(cursor_factory=DictCursor) as cur:
+ cur.execute('SELECT user_id, username FROM users WHERE username = %s', (username,))
+ result = cur.fetchone()
+ return dict(result) if result else None
def delete_group(self, group_id: str) -> bool:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('DELETE FROM user_groups WHERE group_id = ?', (group_id,))
- cursor.execute('DELETE FROM groups WHERE group_id = ?', (group_id,))
- conn.commit()
- if cursor.rowcount > 0:
- # Update cache
- self.groups_cache.pop(group_id, None)
- self.group_users_cache.pop(group_id, None)
- for user_groups in self.user_groups_cache.values():
- if group_id in user_groups:
- user_groups.remove(group_id)
- return True
- return False
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('DELETE FROM user_groups WHERE group_id = %s', (group_id,))
+ cur.execute('DELETE FROM groups WHERE group_id = %s', (group_id,))
+ return cur.rowcount > 0
def delete_user(self, user_id: str) -> bool:
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- cursor.execute('DELETE FROM user_groups WHERE user_id = ?', (user_id,))
- cursor.execute('DELETE FROM users WHERE user_id = ?', (user_id,))
- conn.commit()
- if cursor.rowcount > 0:
- # Update cache
- self.users_cache.pop(user_id, None)
- self.user_groups_cache.pop(user_id, None)
- for group_users in self.group_users_cache.values():
- if user_id in group_users:
- group_users.remove(user_id)
- return True
- return False
- def get_user_last_seen_online(self, user_id: str) -> str:
- """
- Get the last_seen_online timestamp for a given user_id.
-
- :param user_id: The ID of the user
- :return: The last seen timestamp as a string, or None if user not found
- """
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- try:
- cursor.execute('''
- SELECT last_seen_online
- FROM users
- WHERE user_id = ?
- ''', (user_id,))
-
- result = cursor.fetchone()
- if result:
- return result[0]
- else:
- print(f"User with ID {user_id} not found.")
- return None
- except sqlite3.Error as e:
- print(f"An error occurred: {e}")
- return None
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('DELETE FROM user_groups WHERE user_id = %s', (user_id,))
+ cur.execute('DELETE FROM users WHERE user_id = %s', (user_id,))
+ return cur.rowcount > 0
+
+ def get_user_last_seen_online(self, user_id: str) -> Optional[str]:
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+ cur.execute('SELECT last_seen_online FROM users WHERE user_id = %s', (user_id,))
+ result = cur.fetchone()
+                return result[0].isoformat() if result and result[0] is not None else None
def set_user_last_seen_online(self, user_id: str) -> bool:
- """
- Set the last_seen_online timestamp for a given user_id to the current time.
-
- :param user_id: The ID of the user
- :return: True if successful, False if user not found
- """
- with sqlite3.connect(self.db_file) as conn:
- cursor = conn.cursor()
- try:
- cursor.execute('''
- UPDATE users
- SET last_seen_online = ?
- WHERE user_id = ?
- ''', (datetime.now().replace(microsecond=0), user_id))
-
- if cursor.rowcount == 0:
- print(f"User with ID {user_id} not found.")
- return False
-
- return True
- except sqlite3.Error as e:
- print(f"An error occurred: {e}")
- return False
+ with self._get_connection() as conn:
+ with conn.cursor() as cur:
+                cur.execute('UPDATE users SET last_seen_online = %s WHERE user_id = %s', (datetime.now(), user_id))  # NOTE(review): naive local time, but MissedMessageService treats it as UTC -- confirm tz handling
+ return cur.rowcount > 0
diff --git a/topos/services/messages/missed_message_service.py b/topos/services/messages/missed_message_service.py
index 69df6e4..5577d6a 100644
--- a/topos/services/messages/missed_message_service.py
+++ b/topos/services/messages/missed_message_service.py
@@ -1,22 +1,19 @@
from topos.services.messages.missed_message_manager import MissedMessageManager
from topos.services.messages.group_management_service import GroupManagementService
-from topos.utilities.utils import sqlite_timestamp_to_ms
-
-KAFKA_TOPIC = 'chat_topic'
+from datetime import datetime
+import pytz
class MissedMessageService:
- def __init__(self) -> None:
+ def __init__(self, db_params: dict) -> None:
self.missed_message_manager = MissedMessageManager()
- pass
- # houskeeping if required
- # if you need to inject the group management service here it could be an option ??
+ self.group_management_service = GroupManagementService(db_params)
- async def get_missed_messages(self,user_id :str ,group_management_service :GroupManagementService):
- last_seen = group_management_service.get_user_last_seen_online(user_id=user_id)
- if(last_seen):
- users_groups = group_management_service.get_user_groups(user_id=user_id)
- group_ids = [group["group_id"] for group in users_groups]
- # get the last timestamp msg processed by the user
- return await self.missed_message_manager.get_filtered_missed_messages(key_filter=group_ids,timestamp_ms=sqlite_timestamp_to_ms(last_seen))
+ async def get_missed_messages(self, user_id: str):
+ last_seen = self.group_management_service.get_user_last_seen_online(user_id=user_id)
+ if last_seen:
+ users_groups = self.group_management_service.get_user_groups(user_id=user_id)
+ group_ids = set(group["group_id"] for group in users_groups)
+ timestamp_ms = int(datetime.fromisoformat(last_seen).replace(tzinfo=pytz.UTC).timestamp() * 1000)
+ return await self.missed_message_manager.get_filtered_missed_messages(key_filter=group_ids, timestamp_ms=timestamp_ms)
else:
return []
From 1b5372b2d6f90ccd71e79c2d2ce28dbc41e2e98c Mon Sep 17 00:00:00 2001
From: luc
Date: Tue, 15 Oct 2024 09:24:47 -0400
Subject: [PATCH 18/18] feat: Implement Kafka-based chat system and group
management API (WIP)
This commit introduces a Kafka-based chat system and adds group management functionality:
- Add KafkaManager class for handling Kafka producer and consumer operations
- Implement WebSocket endpoint for real-time chat communication
- Integrate Kafka message broadcasting with WebSocket connections
- Add API endpoints for:
- Retrieving missed messages
- Creating chat groups
- Joining existing chat groups
- Update FastAPI app to use CORS middleware and lifespan management
- Implement GroupManagementService and MissedMessageService integration
These changes establish the foundation for a scalable, real-time chat system
with group management capabilities, leveraging Kafka for message persistence
and distribution.
---
poetry.lock | 65 +++++++++++++++++++++++-
pyproject.toml | 1 +
topos/api/api.py | 51 +++++++++++++++++--
topos/api/api_routes.py | 45 +++++++++++++---
topos/services/messages/kafka_manager.py | 32 ++++++++++++
5 files changed, 183 insertions(+), 11 deletions(-)
create mode 100644 topos/services/messages/kafka_manager.py
diff --git a/poetry.lock b/poetry.lock
index cb68c04..f248bd4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -31,6 +31,58 @@ test-prod = ["parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "py
test-trackers = ["comet-ml", "dvclive", "tensorboard", "wandb"]
testing = ["bitsandbytes", "datasets", "diffusers", "evaluate", "parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "pytest-xdist", "scikit-learn", "scipy", "timm", "torchpippy (>=0.2.0)", "tqdm", "transformers"]
+[[package]]
+name = "aiokafka"
+version = "0.11.0"
+description = "Kafka integration with asyncio"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "aiokafka-0.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:926f93fb6a39891fd4364494432b479c0602f9cac708778d4a262a2c2e20d3b4"},
+ {file = "aiokafka-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e1917e706c1158d5e1f612d1fc1b40f706dc46c534e73ab4de8ae2868a31be"},
+ {file = "aiokafka-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516e1d68d9a377860b2e17453580afe304605bc71894f684d3e7b6618f6f939f"},
+ {file = "aiokafka-0.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acfd0a5d0aec762ba73eeab73b23edce14f315793f063b6a4b223b6f79e36bb8"},
+ {file = "aiokafka-0.11.0-cp310-cp310-win32.whl", hash = "sha256:0d80590c4ef0ba546a299cee22ea27c3360c14241ec43a8e6904653f7b22d328"},
+ {file = "aiokafka-0.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d519bf9875ac867fb19d55de3750833b1eb6379a08de29a68618e24e6a49fc0"},
+ {file = "aiokafka-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0e957b42ae959365efbb45c9b5de38032c573608553c3670ad8695cc210abec9"},
+ {file = "aiokafka-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:224db2447f6c1024198d8342e7099198f90401e2fa29c0762afbc51eadf5c490"},
+ {file = "aiokafka-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef3e7c8a923e502caa4d24041f2be778fd7f9ee4587bf0bcb4f74cac05122fa"},
+ {file = "aiokafka-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59f4b935589ebb244620afad8bf3320e3bc86879a8b1c692ad06bd324f6c6127"},
+ {file = "aiokafka-0.11.0-cp311-cp311-win32.whl", hash = "sha256:560839ae6bc13e71025d71e94df36980f5c6e36a64916439e598b6457267a37f"},
+ {file = "aiokafka-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:1f8ae91f0373830e4664376157fe61b611ca7e573d8a559b151aef5bf53df46c"},
+ {file = "aiokafka-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4e0cc080a7f4c659ee4e1baa1c32adedcccb105a52156d4909f357d76fac0dc1"},
+ {file = "aiokafka-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55a07a39d82c595223a17015ea738d152544cee979d3d6d822707a082465621c"},
+ {file = "aiokafka-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3711fa64ee8640dcd4cb640f1030f9439d02e85acd57010d09053017092d8cc2"},
+ {file = "aiokafka-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:818a6f8e44b02113b9e795bee6029c8a4e525ab38f29d7adb0201f3fec74c808"},
+ {file = "aiokafka-0.11.0-cp312-cp312-win32.whl", hash = "sha256:8ba981956243767b37c929845c398fda2a2e35a4034d218badbe2b62e6f98f96"},
+ {file = "aiokafka-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a478a14fd23fd1ffe9c7a21238d818b5f5e0626f7f06146b687f3699298391b"},
+ {file = "aiokafka-0.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0973a245b8b9daf8ef6814253a80a700f1f54d2da7d88f6fe479f46e0fd83053"},
+ {file = "aiokafka-0.11.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee0c61a2dcabbe4474ff237d708f9bd663dd2317e03a9cb7239a212c9ee05b12"},
+ {file = "aiokafka-0.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:230170ce2e8a0eb852e2e8b78b08ce2e29b77dfe2c51bd56f5ab4be0f332a63b"},
+ {file = "aiokafka-0.11.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eac78a009b713e28b5b4c4daae9d062acbf2b7980e5734467643a810134583b5"},
+ {file = "aiokafka-0.11.0-cp38-cp38-win32.whl", hash = "sha256:73584be8ba7906e3f33ca0f08f6af21a9ae31b86c6b635b93db3b1e6f452657b"},
+ {file = "aiokafka-0.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:d724b6fc484e453b373052813e4e543fc028a22c3fbda10e13b6829740000b8a"},
+ {file = "aiokafka-0.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419dd28c8ed6e926061bdc60929af08a6b52f1721e1179d9d21cc72ae28fd6f6"},
+ {file = "aiokafka-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c85f66eb3564c5e74d8e4c25df4ac1fd94f1a6f6e66f005aafa6f791bde215"},
+ {file = "aiokafka-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaafe134de57b184f3c030e1a11051590caff7953c8bf58048eefd8d828e39d7"},
+ {file = "aiokafka-0.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:807f699cf916369b1a512e4f2eaec714398c202d8803328ef8711967d99a56ce"},
+ {file = "aiokafka-0.11.0-cp39-cp39-win32.whl", hash = "sha256:d59fc7aec088c9ffc02d37e61591f053459bd11912cf04c70ac4f7e60405667d"},
+ {file = "aiokafka-0.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:702aec15b63bad5e4476294bcb1cb177559149fce3e59335794f004c279cbd6a"},
+ {file = "aiokafka-0.11.0.tar.gz", hash = "sha256:f2def07fe1720c4fe37c0309e355afa9ff4a28e0aabfe847be0692461ac69352"},
+]
+
+[package.dependencies]
+async-timeout = "*"
+packaging = "*"
+typing-extensions = ">=4.10.0"
+
+[package.extras]
+all = ["cramjam (>=2.8.0)", "gssapi"]
+gssapi = ["gssapi"]
+lz4 = ["cramjam (>=2.8.0)"]
+snappy = ["cramjam"]
+zstd = ["cramjam"]
+
[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -64,6 +116,17 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
@@ -4307,4 +4370,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
-content-hash = "c84fc9b6664a4af5e8252628955097c476976b14019aa8c155169851761c4bf3"
+content-hash = "70c38253f5dc9e36e1391566f1502005111a2ea7a3adbf13d951a3a13aabcc36"
diff --git a/pyproject.toml b/pyproject.toml
index 98c362e..69c0878 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,6 +46,7 @@ en-core-web-sm = {url = "https://github.com/explosion/spacy-models/releases/down
en-core-web-lg = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_lg-3.8.0/en_core_web_lg-3.8.0-py3-none-any.whl"}
en-core-web-md = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_md-3.8.0/en_core_web_md-3.8.0-py3-none-any.whl"}
en-core-web-trf = {url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_trf-3.8.0/en_core_web_trf-3.8.0-py3-none-any.whl"}
+aiokafka = "^0.11.0"
[tool.poetry.group.dev.dependencies]
pytest = "^7.4.3"
pytest-asyncio = "^0.23.2"
diff --git a/topos/api/api.py b/topos/api/api.py
index d4a935d..5362b97 100644
--- a/topos/api/api.py
+++ b/topos/api/api.py
@@ -1,13 +1,56 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, WebSocket, WebSocketDisconnect
+from fastapi.middleware.cors import CORSMiddleware
+from contextlib import asynccontextmanager
+import asyncio
from ..config import setup_config, get_ssl_certificates
from .websocket_handlers import router as websocket_router
from .api_routes import router as api_router
from .p2p_chat_routes import router as p2p_chat_router
from .debate_routes import router as debate_router
import uvicorn
+from topos.services.messages.kafka_manager import KafkaManager
+
+
+# Kafka configuration
+KAFKA_BOOTSTRAP_SERVERS = 'localhost:9092'
+KAFKA_TOPIC = 'chat_topic'
+
+kafka_manager = KafkaManager(KAFKA_BOOTSTRAP_SERVERS, KAFKA_TOPIC)
# Create the FastAPI application instance
-app = FastAPI()
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ await kafka_manager.start()
+ consume_task = asyncio.create_task(kafka_manager.consume_messages(broadcast_message))
+ yield
+ consume_task.cancel()
+ await kafka_manager.stop()
+
+app = FastAPI(lifespan=lifespan)
+
+app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+)
+
+async def broadcast_message(message, group_id):
+ # Implement the logic to broadcast the message to all connected WebSocket clients in the group
+ pass
+
+@app.websocket("/ws/{group_id}")
+async def websocket_endpoint(websocket: WebSocket, group_id: str):
+    await websocket.accept()
+    try:
+        while True:
+            data = await websocket.receive_text()
+            # Forward the received message to Kafka, keyed by the client's group
+            await kafka_manager.send_message(group_id, data)
+ except WebSocketDisconnect:
+ # Handle disconnection
+ pass
# Configure the application using settings from config.py
setup_config(app)
@@ -39,8 +82,8 @@ def start_web_api():
"""Function to start the API in web mode with SSL."""
certs = get_ssl_certificates()
uvicorn.run(app, host="0.0.0.0", port=13341, ssl_keyfile=certs['key_path'], ssl_certfile=certs['cert_path'])
-
+
def start_hosted_service():
"""Function to start the API in web mode with SSL."""
- uvicorn.run(app, host="0.0.0.0", port=8000)
\ No newline at end of file
+ uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/topos/api/api_routes.py b/topos/api/api_routes.py
index 34a1c64..22ae18c 100644
--- a/topos/api/api_routes.py
+++ b/topos/api/api_routes.py
@@ -1,22 +1,20 @@
# api_routes.py
-
import os
-from fastapi import APIRouter, HTTPException, Request
+from fastapi import APIRouter, HTTPException, Request, Depends
from fastapi.responses import JSONResponse
import requests
import signal
import glob
import sys
+from pydantic import BaseModel
from topos.FC.conversation_cache_manager import ConversationCacheManager
-router = APIRouter()
-
+from topos.services.messages.group_management_service import GroupManagementService
+from topos.services.messages.missed_message_service import MissedMessageService
from collections import Counter, OrderedDict, defaultdict
from pydantic import BaseModel
-
from ..generations.chat_gens import LLMController
from ..utilities.utils import create_conversation_string
from ..services.ontology_service.mermaid_chart import MermaidCreator
-
import logging
db_config = {
@@ -27,6 +25,19 @@
"port": os.getenv("POSTGRES_PORT")
}
+router = APIRouter()
+
+class MissedMessagesRequest(BaseModel):
+ user_id: str
+
+class CreateGroupRequest(BaseModel):
+ group_name: str
+ user_id: str
+
+class JoinGroupRequest(BaseModel):
+ group_id: str
+ user_id: str
+
logging.info(f"Database configuration: {db_config}")
use_postgres = True
@@ -458,3 +469,25 @@ async def generate_mermaid_chart(payload: MermaidChartPayload):
except Exception as e:
return {"status": "error", "message": str(e)}
+
+@router.post("/chat/missed-messages")
+async def get_missed_messages(request: MissedMessagesRequest):
+ missed_message_service = MissedMessageService(db_config)
+    # MissedMessageService now constructs its own GroupManagementService from db_params
+    return await missed_message_service.get_missed_messages(user_id=request.user_id)
+
+@router.post("/chat/create-group")
+async def create_group(request: CreateGroupRequest):
+ group_management_service = GroupManagementService(db_config)
+ group_id = group_management_service.create_group(request.group_name)
+ group_management_service.add_user_to_group(request.user_id, group_id)
+ return {"group_id": group_id}
+
+@router.post("/chat/join-group")
+async def join_group(request: JoinGroupRequest):
+ group_management_service = GroupManagementService(db_config)
+ if group_management_service.get_group_by_id(request.group_id):
+ group_management_service.add_user_to_group(request.user_id, request.group_id)
+ return {"status": "success"}
+ else:
+ raise HTTPException(status_code=404, detail="Group not found")
diff --git a/topos/services/messages/kafka_manager.py b/topos/services/messages/kafka_manager.py
new file mode 100644
index 0000000..1ecc75f
--- /dev/null
+++ b/topos/services/messages/kafka_manager.py
@@ -0,0 +1,32 @@
+from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
+import json
+import asyncio
+
+class KafkaManager:
+ def __init__(self, bootstrap_servers, topic):
+ self.bootstrap_servers = bootstrap_servers
+ self.topic = topic
+ self.producer = None
+ self.consumer = None
+
+ async def start(self):
+ self.producer = AIOKafkaProducer(bootstrap_servers=self.bootstrap_servers)
+ self.consumer = AIOKafkaConsumer(
+ self.topic,
+ bootstrap_servers=self.bootstrap_servers,
+ )
+ await self.producer.start()
+ await self.consumer.start()
+
+ async def stop(self):
+ await self.producer.stop()
+ await self.consumer.stop()
+
+ async def send_message(self, key, value):
+ await self.producer.send_and_wait(self.topic, key=key.encode('utf-8'), value=json.dumps(value).encode('utf-8'))
+
+ async def consume_messages(self, callback):
+ async for msg in self.consumer:
+ message = json.loads(msg.value.decode('utf-8'))
+ group_id = msg.key.decode('utf-8')
+ await callback(message, group_id)