diff --git a/.github/workflows/app-deploy.yaml b/.github/workflows/app-deploy.yaml
index 67fb2232..78d5ce1d 100644
--- a/.github/workflows/app-deploy.yaml
+++ b/.github/workflows/app-deploy.yaml
@@ -1,4 +1,4 @@
-name: Release
+name: Release image
 
 on:
   workflow_dispatch:
@@ -55,40 +55,3 @@ jobs:
           docker push ghcr.io/hyperleda/hyperleda:$GIT_VERSION
           docker tag ghcr.io/hyperleda/hyperleda:$GIT_VERSION ghcr.io/hyperleda/hyperleda:latest
           docker push ghcr.io/hyperleda/hyperleda:latest
-
-  deploy-test:
-    name: Deploy to test environment
-    needs: push-docker
-    environment: testing
-    runs-on: ubuntu-latest
-    env:
-      HOST: ${{ secrets.BACKEND_HOST }}
-      BACKEND_USER: ${{ secrets.BACKEND_USER }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Install uv
-        uses: astral-sh/setup-uv@v5
-
-      - name: Install the project
-        run: uv sync --all-extras --dev
-
-      - name: Setup SSH agent
-        uses: webfactory/ssh-agent@v0.9.0
-        with:
-          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
-
-      - name: Write private key and config
-        run: |
-          mkdir infra/settings
-          echo "${{ secrets.TEST_DEPLOY_CONFIG }}" >> infra/settings/test.yaml
-          echo "${{ secrets.SSH_PRIVATE_KEY }}" >> infra/hyperleda_rsa
-
-      - name: Add backend to known hosts
-        run: |
-          ssh-keyscan -H ${{ secrets.BACKEND_HOST }} >> ~/.ssh/known_hosts
-
-      - name: Copy files and restart the backend
-        run: |
-          make deploy-test
diff --git a/infra/.env b/infra/.env
deleted file mode 100644
index 80834719..00000000
--- a/infra/.env
+++ /dev/null
@@ -1 +0,0 @@
-HOST=127.0.0.1
diff --git a/infra/.gitignore b/infra/.gitignore
deleted file mode 100644
index fe5dd4a2..00000000
--- a/infra/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-settings
\ No newline at end of file
diff --git a/infra/configs/nginx/nginx.conf b/infra/configs/nginx/nginx.conf
deleted file mode 100644
index 09a836cb..00000000
--- a/infra/configs/nginx/nginx.conf
+++ /dev/null
@@ -1,32 +0,0 @@
-server {
-    listen 80 default_server;
-    server_name _;
-
-    access_log /var/log/nginx/access.log;
-    error_log /var/log/nginx/error.log;
-
-    location /ping {
-        return 200 'pong';
-    }
-
-    location /admin/api/ {
-        proxy_pass http://adminapi:8080/admin/api/;
-        proxy_set_header Host $host;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    }
-
-    location /api/ {
-        proxy_pass http://dataapi:8081/api/;
-        proxy_set_header Host $host;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    }
-
-    location / {
-        proxy_pass http://webapp:3000;
-        proxy_set_header Host $host;
-        proxy_set_header X-Real-IP $remote_addr;
-        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    }
-}
\ No newline at end of file
diff --git a/infra/deploy.py b/infra/deploy.py
deleted file mode 100644
index 3e509dd4..00000000
--- a/infra/deploy.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import pathlib
-import subprocess
-import sys
-from dataclasses import dataclass
-
-import deployment
-import structlog
-
-
-def get_git_version() -> str:
-    result = subprocess.run(["git", "rev-parse", "--short", "HEAD"], capture_output=True, text=True, check=True)
-    return result.stdout.strip()
-
-
-@dataclass
-class EnvParams:
-    connection: deployment.ConnectionContext
-    remote_base_path: str
-    configs_env: str
-    ads_token: str = ""
-    postgres_password: str = ""
-
-    ads_token_env = "CLIENTS_ADS_TOKEN"
-    postgres_password_env = "POSTGRES_PASSWORD"
-
-    @classmethod
-    def from_yaml(cls, path: str) -> "EnvParams":
-        import yaml
-
-        with pathlib.Path(path).open("r") as f:
-            data = yaml.safe_load(f)
-
-        data["connection"] = deployment.ConnectionContext(**data["connection"])
-
-        params = cls(**data)
-
-        params.ads_token = params.ads_token or os.getenv(params.ads_token_env) or ""
-        params.postgres_password = params.postgres_password or os.getenv(params.postgres_password_env) or ""
-
-        return params
-
-
-def get_spec(params: EnvParams) -> deployment.RemoteSpec:
-    return deployment.RemoteSpec(
-        [
-            deployment.RemoteFile(
-                "infra/docker-compose.yaml",
-                "docker-compose.yaml",
-            ),
-            deployment.RemoteContent(
-                get_git_version(),
-                "version.txt",
-            ),
-            deployment.RemoteDirectory(
-                "postgres/migrations",
-                "postgres/migrations",
-            ),
-            deployment.RemoteDirectory(
-                "infra/configs/nginx",
-                "configs",
-            ),
-            deployment.RemoteDirectory(
-                pathlib.Path("configs") / params.configs_env,
-                "configs",
-            ),
-        ],
-        root_dir=pathlib.Path(params.remote_base_path),
-        env_vars={
-            "CLIENTS_ADS_TOKEN": params.ads_token,
-            "POSTGRES_PASSWORD": params.postgres_password,
-        },
-    )
-
-
-if __name__ == "__main__":
-    logger = structlog.get_logger()
-
-    config = sys.argv[1]
-    params = EnvParams.from_yaml(config)
-
-    spec = get_spec(params)
-    print(spec)
-
-    spec.apply(params.connection, logger)
-    spec.reload()
diff --git a/infra/deployment.py b/infra/deployment.py
deleted file mode 100644
index 8e07bfee..00000000
--- a/infra/deployment.py
+++ /dev/null
@@ -1,187 +0,0 @@
-import io
-import os
-import pathlib
-from dataclasses import dataclass, field
-from typing import IO
-
-import structlog
-from fabric import Connection
-
-
-@dataclass
-class RemoteFile:
-    local_path: pathlib.Path | str
-    remote_path: pathlib.Path | str
-
-    def __str__(self) -> str:
-        return f"{self.local_path} --> {self.remote_path}"
-
-
-@dataclass
-class RemoteContent:
-    content: str
-    remote_path: pathlib.Path | str
-
-    def __str__(self) -> str:
-        return f'"{self.content}" --> {self.remote_path}'
-
-
-@dataclass
-class RemoteDirectory:
-    local_path: pathlib.Path | str
-    remote_path: pathlib.Path | str
-
-    def _get_filenames(self) -> list[str]:
-        filenames: list[str] = []
-
-        for _, _, fnames in os.walk(self.local_path):
-            filenames.extend(fnames)
-
-        filenames.sort()
-
-        return filenames
-
-    def __str__(self) -> str:
-        filenames = self._get_filenames()
-        if len(filenames) == 0:
-            return ""
-
-        lines = []
-        lines.append(f"{self.local_path} --> {self.remote_path}")
-
-        for filename in filenames:
-            lines.append(f"\t/{filename}")
-
-        return "\n".join(lines)
-
-    def to_files(self) -> list[RemoteFile]:
-        files: list[RemoteFile] = []
-
-        local_base = pathlib.Path(self.local_path)
-        remote_base = pathlib.Path(self.remote_path)
-        filenames = self._get_filenames()
-
-        for filename in filenames:
-            local_file = local_base / filename
-            remote_file = remote_base / filename
-            files.append(RemoteFile(local_file, remote_file))
-
-        return files
-
-
-RemoteData = RemoteFile | RemoteContent | RemoteDirectory
-
-
-@dataclass
-class ConnectionContext:
-    host: str
-    user: str
-    private_key_filename: str
-
-
-def _run_command(
-    logger: structlog.stdlib.BoundLogger,
-    connection: Connection,
-    cmd: str,
-    params: dict[str, str] | None = None,
-):
-    logger.debug("Running command", cmd=cmd)
-
-    if params:
-        cmd = cmd.format(**params)
-
-    connection.run(cmd)
-
-
-def _apply_item(
-    logger: structlog.stdlib.BoundLogger,
-    connection: Connection,
-    path_on_remote: str | pathlib.Path,
-    file_like: IO | str,
-):
-    remote_path = pathlib.Path(path_on_remote)
-
-    if isinstance(file_like, str):
-        logger.debug("Copying file", src=str(file_like), dst=str(remote_path))
-    else:
-        logger.debug("Writing file", dst=str(remote_path))
-
-    connection.put(file_like, str(remote_path))
-
-
-@dataclass
-class RemoteSpec:
-    data: list[RemoteData]
-    root_dir: pathlib.Path
-    env_vars: dict[str, str] = field(default_factory=dict)
-
-    def add(self, data: RemoteData | list[RemoteData]):
-        if isinstance(data, list):
-            self.data.extend(data)
-            return
-
-        self.data.append(data)
-
-    def __str__(self) -> str:
-        lines = [
-            "------- Params -------",
-            f"Root directory: {self.root_dir}",
-            "",
-            "------- Environment variables -------",
-        ]
-
-        lines.extend(self.env_vars.keys())
-
-        lines.extend(
-            [
-                "",
-                "------- Directory structure -------",
-            ]
-        )
-
-        for entry in self.data:
-            lines.append(str(entry))
-
-        return "\n".join(lines)
-
-    def apply(self, ctx: ConnectionContext, logger: structlog.stdlib.BoundLogger):
-        self.connection = Connection(
-            host=ctx.host,
-            user=ctx.user,
-            connect_kwargs={"key_filename": ctx.private_key_filename},
-        )
-        self.logger = logger
-
-        for item in self.data:
-            if isinstance(item, RemoteFile):
-                path = self.root_dir / item.remote_path
-                _run_command(self.logger, self.connection, f"mkdir -p {str(path.parent)}")
-
-                _apply_item(self.logger, self.connection, path, str(item.local_path))
-            elif isinstance(item, RemoteContent):
-                path = self.root_dir / item.remote_path
-                _run_command(self.logger, self.connection, f"mkdir -p {str(path.parent)}")
-
-                _apply_item(self.logger, self.connection, path, io.StringIO(item.content))
-            elif isinstance(item, RemoteDirectory):
-                _run_command(self.logger, self.connection, f"mkdir -p {str(self.root_dir / item.remote_path)}")
-                files = item.to_files()
-
-                for file in files:
-                    _apply_item(self.logger, self.connection, self.root_dir / file.remote_path, str(file.local_path))
-
-        _run_command(self.logger, self.connection, f"cd {self.root_dir} && docker compose pull")
-
-    def reload(self):
-        env_strings = []
-        params = {}
-
-        for key, value in self.env_vars.items():
-            env_strings.append(f"{key}=" + "{" + key + "}")
-            params[key] = value
-
-        env_string = " ".join(env_strings)
-
-        _run_command(
-            self.logger, self.connection, f"cd {self.root_dir} && {env_string} docker compose up -d", params=params
-        )
diff --git a/infra/docker-compose.yaml b/infra/docker-compose.yaml
deleted file mode 100644
index ed1f331f..00000000
--- a/infra/docker-compose.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
-services:
-  hyperledadb:
-    image: postgis/postgis:17-3.5
-    environment:
-      POSTGRES_DB: "hyperleda"
-      POSTGRES_USER: "hyperleda"
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password}
-    ports:
-      - "5432:5432"
-    volumes:
-      - ./postgres/data:/var/lib/postgresql/data
-    restart: unless-stopped
-    healthcheck:
-      test: psql 'host=localhost port=5432 dbname=hyperleda user=hyperleda password=$${POSTGRES_PASSWORD}' -qtA -c 'select 1;' || exit 1
-      timeout: 5s
-      interval: 5s
-      retries: 5
-
-  migrate:
-    build:
-      dockerfile: postgres/dockerfile
-    environment:
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password}
-    entrypoint: /bin/sh -c "cd database/postgres && pgmigrate -c \"postgresql://hyperledadb:5432/hyperleda?user=hyperleda&password=$${POSTGRES_PASSWORD}\" -t latest migrate"
-    volumes:
-      - .:/database
-    depends_on:
-      hyperledadb:
-        condition: service_healthy
-
-  wait-for-migrate:
-    image: busybox
-    depends_on:
-      migrate:
-        condition: service_completed_successfully
-    entrypoint: /bin/sh -c "echo 'Migration completed successfully'"
-
-  adminapi:
-    image: ghcr.io/hyperleda/hyperleda:latest
-    entrypoint: ["uv", "run", "main.py", "adminapi"]
-    environment:
-      - CONFIG=configs/adminapi.yaml
-      - CLIENTS_ADS_TOKEN
-      - STORAGE_PASSWORD=${POSTGRES_PASSWORD:-password}
-    volumes:
-      - ./configs:/usr/src/app/configs
-    restart: unless-stopped
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8080/ping"]
-      interval: 5s
-      timeout: 5s
-      retries: 5
-    depends_on:
-      - wait-for-migrate
-    logging:
-      driver: local
-
-  dataapi:
-    image: ghcr.io/hyperleda/hyperleda:latest
-    entrypoint: ["uv", "run", "main.py", "dataapi"]
-    environment:
-      - CONFIG=configs/dataapi.yaml
-      - SERVER_PORT=8081
-      - STORAGE_PASSWORD=${POSTGRES_PASSWORD:-password}
-    volumes:
-      - ./configs:/usr/src/app/configs
-    restart: unless-stopped
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8081/ping"]
-      interval: 5s
-      timeout: 5s
-      retries: 5
-    depends_on:
-      - wait-for-migrate
-    logging:
-      driver: local
-
-  webapp:
-    image: ghcr.io/hyperleda/hyperleda-webapp:latest
-    depends_on:
-      dataapi:
-        condition: service_healthy
-    healthcheck:
-      test: ["CMD", "curl", "-f", "-LI", "http://localhost:3000/"]
-      interval: 5s
-      timeout: 5s
-      retries: 5
-    restart: unless-stopped
-
-  nginx:
-    image: nginx:latest
-    ports:
-      - "81:80"
-    volumes:
-      - ./configs/nginx.conf:/etc/nginx/conf.d/default.conf
-      - ./logs:/var/log/nginx
-    depends_on:
-      adminapi:
-        condition: service_healthy
-      dataapi:
-        condition: service_healthy
-      webapp:
-        condition: service_healthy
-    restart: unless-stopped
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:80/ping"]
-      interval: 5s
-      timeout: 5s
-      retries: 5
-    logging:
-      driver: local
diff --git a/infra/makefile b/infra/makefile
deleted file mode 100644
index 4d7d1cd3..00000000
--- a/infra/makefile
+++ /dev/null
@@ -1,5 +0,0 @@
-copy-files:
-	set -a && . .env.local && set +a && uv run scripts.py copy-files
-
-deploy:
-	set -a && . .env.local && set +a && uv run scripts.py deploy
diff --git a/makefile b/makefile
index 2a4f275f..3c451619 100644
--- a/makefile
+++ b/makefile
@@ -102,7 +102,7 @@ fix:
 fix-unsafe:
 	uvx ruff check --config=pyproject.toml --unsafe-fixes --fix
 
-## Deploy
+## Release
 
 GIT_VERSION = `git rev-parse --short master`
 
@@ -113,9 +113,3 @@ image-build:
 image-push:
 	docker push ghcr.io/hyperleda/hyperleda:$(GIT_VERSION)
 	docker push ghcr.io/hyperleda/hyperleda:latest
-
-deploy-test:
-	uv run infra/deploy.py infra/settings/test.yaml
-
-deploy-prod:
-	uv run infra/deploy.py infra/settings/prod.yaml
diff --git a/pyproject.toml b/pyproject.toml
index 8f3aa7e2..afe6c5e9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -2,14 +2,14 @@
 name = "hyperleda-backend"
 version = "0.1.0"
 readme = "README.md"
-requires-python = ">=3.13"
+requires-python = "~=3.13.0"
 dependencies = [
-    "astropy>=7.0.0",
-    "astroquery>=0.4.9.post1",
+    "astropy>=7.1.0",
+    "astroquery>=0.4.10",
     "bcrypt>=4.2.1",
     "click>=8.1.8",
     "jinja2>=3.1.5",
-    "pandas>=2.2.3",
+    "pandas>=2.3.2",
     "psycopg[binary,pool]>=3.2.4",
     "pyyaml>=6.0.2",
     "requests>=2.32.3",
@@ -28,7 +28,7 @@ dependencies = [
 
 [project.optional-dependencies]
 test = [
-    "pandas-stubs>=2.2.3.241126",
+    "pandas-stubs>=2.3.2.250827",
     "parameterized>=0.9.0",
    "pytest>=8.3.4",
     "testcontainers-postgres>=0.0.1rc1",