diff --git a/.github/workflows/build-load-test-image.yml b/.github/workflows/build-load-test-image.yml new file mode 100644 index 0000000..e1ce375 --- /dev/null +++ b/.github/workflows/build-load-test-image.yml @@ -0,0 +1,68 @@ +name: Build Load Test Image + +on: + push: + branches: + - main + - develop + paths: + - "apps/load-tests/**" + - "package.json" + - "yarn.lock" + pull_request: + branches: + - main + - develop + paths: + - "apps/load-tests/**" + - "package.json" + - "yarn.lock" + +jobs: + build: + name: Build Docker Image + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ghcr.io/${{ github.repository }}/load-test + tags: | + type=ref,event=branch + type=ref,event=pr + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=sha,prefix={{branch}}- + type=raw,value=latest,enable={{is_default_branch}} + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: ./apps/load-tests/Dockerfile + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Image digest + run: echo "Image built: ${{ steps.meta.outputs.tags }}" diff --git a/.github/workflows/load-test.yml b/.github/workflows/load-test.yml new file mode 100644 index 0000000..575193f --- /dev/null +++ b/.github/workflows/load-test.yml @@ -0,0 +1,112 @@ +name: Load Test + +on: + workflow_dispatch: + inputs: + environment: + description: "Target environment" + required: true + type: choice + options: + - dev + - uat + - prod + scenario: + description: "Test scenario" + required: true + type: choice + options: + - connection-storm + - steady-state + connections: + description: "Number of connections" + required: false + type: string + default: "100" + duration: + description: "Test duration in seconds (for steady-state)" + required: false + type: string + default: "60" + rampUp: + description: "Ramp-up time in seconds" + required: false + type: string + default: "10" + +jobs: + load-test: + name: Run Load Test + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Install dependencies + run: yarn install --frozen-lockfile + + - name: Validate inputs + run: | + echo "Environment: ${{ github.event.inputs.environment }}" + echo "Scenario: ${{ github.event.inputs.scenario }}" + echo "Connections: ${{ github.event.inputs.connections }}" + echo "Duration: ${{ github.event.inputs.duration }}" + echo "Ramp-up: ${{ github.event.inputs.rampUp }}" + + - name: Require manual approval for production + if: github.event.inputs.environment == 'prod' + uses: trstringer/manual-approval@v1 + with: + secret: ${{ github.TOKEN }} + approvers: "chakra-guy,adonesky1,jiexi,ffmcgee725,wenfix" + minimum-approvals: 1 + 
issue-title: "Approve production load test" + issue-body: | + Production load test requested: + - Environment: ${{ github.event.inputs.environment }} + - Scenario: ${{ github.event.inputs.scenario }} + - Connections: ${{ github.event.inputs.connections }} + - Duration: ${{ github.event.inputs.duration }}s + - Ramp-up: ${{ github.event.inputs.rampUp }}s + + **⚠️ WARNING: This will run load tests against PRODUCTION** + + Please review and approve if this is intentional. + + - name: Run load test + env: + RELAY_URL_DEV: ${{ secrets.RELAY_URL_DEV }} + RELAY_URL_UAT: ${{ secrets.RELAY_URL_UAT }} + RELAY_URL_PROD: ${{ secrets.RELAY_URL_PROD }} + run: | + cd apps/load-tests + yarn start \ + --environment "${{ github.event.inputs.environment }}" \ + --scenario "${{ github.event.inputs.scenario }}" \ + --connections "${{ github.event.inputs.connections }}" \ + --duration "${{ github.event.inputs.duration }}" \ + --ramp-up "${{ github.event.inputs.rampUp }}" \ + --output "results/load-test-$(date +%Y%m%d-%H%M%S).json" + + - name: Upload results + if: always() + uses: actions/upload-artifact@v4 + with: + name: load-test-results-${{ github.event.inputs.environment }}-${{ github.run_number }} + path: apps/load-tests/results/*.json + retention-days: 30 + + - name: Display results summary + if: always() + run: | + echo "## Load Test Results" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "- Environment: ${{ github.event.inputs.environment }}" >> $GITHUB_STEP_SUMMARY + echo "- Scenario: ${{ github.event.inputs.scenario }}" >> $GITHUB_STEP_SUMMARY + echo "- Results uploaded as artifact" >> $GITHUB_STEP_SUMMARY diff --git a/apps/load-tests/.dockerignore b/apps/load-tests/.dockerignore new file mode 100644 index 0000000..1c69bf7 --- /dev/null +++ b/apps/load-tests/.dockerignore @@ -0,0 +1,44 @@ +# Dependencies +node_modules/ +.yarn/ + +# Build outputs +dist/ +*.tsbuildinfo + +# Test results +results/ +*.json +!package.json +!tsconfig.json + +# Git +.git/ +.gitignore 
+ +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# Environment files +.env +.env.local +.env.*.local + +# Documentation +README.md +*.md + +# CI/CD +.github/ + +# Infrastructure state +.infrastructure/ + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* diff --git a/apps/load-tests/.env.example b/apps/load-tests/.env.example new file mode 100644 index 0000000..8b54dd7 --- /dev/null +++ b/apps/load-tests/.env.example @@ -0,0 +1,20 @@ +# Load Test Runner Configuration +# ================================ + +# Target relay server URL (required for running tests) +# RELAY_URL=ws://localhost:8000/connection/websocket +# RELAY_URL=wss://mm-sdk-relay.api.cx.metamask.io/connection/websocket + +# DigitalOcean Infrastructure Configuration +# ========================================== + +# DigitalOcean API token (required for infra commands) +# Get this from: https://cloud.digitalocean.com/account/api/tokens +DIGITALOCEAN_TOKEN= + +# SSH key fingerprint registered with DigitalOcean (required for infra commands) +# Find this in: https://cloud.digitalocean.com/account/security +SSH_KEY_FINGERPRINT= + +# Path to SSH private key (optional, defaults to ~/.ssh/id_rsa) +# SSH_PRIVATE_KEY_PATH=~/.ssh/id_rsa diff --git a/apps/load-tests/.gitignore b/apps/load-tests/.gitignore new file mode 100644 index 0000000..aaae9ef --- /dev/null +++ b/apps/load-tests/.gitignore @@ -0,0 +1,11 @@ +# Environment files (except example) +.env +.env.local + +# Results (except .gitkeep) +results/* +!results/.gitkeep + +# Infrastructure state +results/.infra-state.json + diff --git a/apps/load-tests/DEVOPS_QUESTIONS.md b/apps/load-tests/DEVOPS_QUESTIONS.md new file mode 100644 index 0000000..86bcf5c --- /dev/null +++ b/apps/load-tests/DEVOPS_QUESTIONS.md @@ -0,0 +1,413 @@ +# DevOps Open Questions: Load Testing Infrastructure + +This document outlines open questions and required changes for setting up AWS-based load testing infrastructure. 
+ +## Current State + +The load testing system is fully functional locally and ready for AWS integration. All code includes abstraction layers that allow swapping AWS implementations without changing core logic. + +**What works now:** + +- ✅ Environment configuration (env vars + config file) +- ✅ Result uploader abstraction (local file system) +- ✅ Metadata collection (runner type, git SHA, container ID) +- ✅ Docker containerization +- ✅ GitHub Actions workflow (runs directly, not via AWS yet) + +## Open Questions + +### 1. Container Registry + +**Question:** Which container registry should we use? + +**Current:** GitHub Container Registry (GHCR) - `ghcr.io/${{ github.repository }}/load-test` + +**Options:** + +- **GHCR** (current): Already configured, works with GitHub Actions + - Pros: No additional setup, integrated with GitHub + - Cons: External to AWS, potential egress costs +- **AWS ECR**: Native AWS container registry + - Pros: AWS-native, better integration with ECS/EC2, potentially cheaper + - Cons: Requires AWS setup, GitHub Actions needs AWS credentials +- **Hybrid**: Build in GitHub Actions, push to ECR + - Pros: CI validation in GitHub, storage in AWS + - Cons: More complex setup + +**Recommendation:** ECR for AWS-native integration, but GHCR is fine if simpler. + +**Required Changes:** + +- If ECR: Update `.github/workflows/build-load-test-image.yml` to push to ECR instead of GHCR +- Add AWS credentials to GitHub Actions secrets +- Update image pull location in AWS infrastructure + +--- + +### 2. Infrastructure Choice + +**Question:** Where should load generators run? 
+ +**Options:** + +- **ECS Fargate** (recommended): Serverless containers, auto-scaling, pay-per-use + - Pros: No instance management, scales automatically, cost-effective + - Cons: Cold start latency, 15-minute timeout limit +- **EC2 Spot Instances**: Cost-effective, requires instance management + - Pros: Very cheap, full control + - Cons: Can be interrupted, requires instance management +- **EC2 On-Demand**: Standard instances + - Pros: Reliable, full control + - Cons: More expensive, requires instance management +- **Lambda**: Serverless functions + - Pros: Fully serverless, very cheap + - Cons: 15-minute timeout limit, may not suit long-running tests + +**Recommendation:** ECS Fargate for simplicity and cost-effectiveness. + +**Required Changes:** + +- Create ECS cluster: `load-test-cluster` +- Create task definition template +- Configure Fargate launch type (CPU/memory requirements TBD) +- Set up CloudWatch log group: `/ecs/load-test` + +--- + +### 3. AWS Account Structure + +**Question:** Which AWS account should host this infrastructure? + +**Options:** + +- Same account as production relay servers +- Separate account for load testing +- Separate account per environment (dev/UAT/prod) + +**Recommendation:** Separate account for load testing to isolate costs and permissions. + +**Required Changes:** + +- Determine account structure +- Set up cross-account access if needed +- Configure IAM roles and permissions + +--- + +### 4. Network Configuration + +**Question:** What network configuration is needed? + +**Open Questions:** + +- Do ECS tasks need VPC configuration? +- Public vs private subnets? +- Security groups and network ACLs? +- How do tasks access the relay servers (public internet vs VPC peering)? + +**Required Changes:** + +- Configure VPC (if needed) +- Set up security groups +- Configure network access to relay servers + +--- + +### 5. Secrets Management + +**Question:** How should environment URLs be stored? 
+ +**Current:** GitHub Secrets (`RELAY_URL_DEV`, `RELAY_URL_UAT`, `RELAY_URL_PROD`) + +**Proposed:** AWS Secrets Manager + +**Required Setup:** + +- Create secrets: + - `load-test/dev/relay-url` + - `load-test/uat/relay-url` + - `load-test/prod/relay-url` +- IAM role for ECS tasks with Secrets Manager read permissions +- Update code to read from Secrets Manager (abstraction layer ready) + +**Code Changes Needed:** + +- Implement `AwsSecretsManagerConfigProvider` in `apps/load-tests/src/config/environments.ts` +- Add AWS SDK dependency: `@aws-sdk/client-secrets-manager` + +--- + +### 6. Result Storage + +**Question:** Where should test results be stored? + +**Current:** Local filesystem, GitHub Actions artifacts + +**Proposed:** S3 bucket + +**Required Setup:** + +- Create S3 bucket: `mobile-wallet-protocol-load-test-results` (or similar) +- Structure: `{environment}/{scenario}/{timestamp}/{task-id}/results.json` +- Enable versioning and lifecycle policies +- Set up bucket policies for ECS task access + +**Code Changes Needed:** + +- Implement `S3Uploader` in `apps/load-tests/src/output/uploader.ts` +- Add AWS SDK dependency: `@aws-sdk/client-s3` +- Update uploader selection logic + +**IAM Permissions Required:** + +- `s3:PutObject` on results bucket +- `s3:PutObjectAcl` (if needed) + +--- + +### 7. Monitoring and Logging + +**Question:** What monitoring and alerting is needed? + +**Required Setup:** + +- CloudWatch log group: `/ecs/load-test` +- CloudWatch alarms for: + - Failed test runs + - Unusual test durations + - Production test runs (for audit) +- SNS notifications for critical failures + +**Code Changes Needed:** + +- Ensure logs are sent to CloudWatch (automatic with ECS) +- Add CloudWatch metrics (optional, for advanced monitoring) + +--- + +### 8. Cost Management + +**Question:** How should costs be managed? 
+ +**Required Setup:** + +- Budget alerts +- Cost allocation tags: + - `Environment: dev/uat/prod` + - `Project: load-testing` + - `Team: ` +- Cost monitoring dashboard + +--- + +### 9. Production Safety + +**Question:** How do we prevent accidental production runs? + +**Current:** Manual approval in GitHub Actions workflow + +**Additional Safeguards Needed:** + +- IAM policy preventing production runs without approval +- Separate AWS account for production (if using separate accounts) +- CloudWatch alarm for all production runs +- Audit logging + +**Required Changes:** + +- Update IAM policies +- Configure CloudWatch alarms +- Set up audit trail + +--- + +### 10. GitHub Actions Integration + +**Question:** How should GitHub Actions trigger AWS infrastructure? + +**Current:** Runs directly in GitHub Actions runner + +**Options:** + +- **Option A**: GitHub Actions triggers AWS CodePipeline/CodeBuild + - Pros: AWS-native, better integration + - Cons: More complex setup +- **Option B**: GitHub Actions uses AWS SDK to trigger ECS tasks + - Pros: Direct control, simpler + - Cons: Requires AWS credentials in GitHub +- **Option C**: Keep current approach, build image in AWS instead + - Pros: Minimal changes + - Cons: Still runs in GitHub Actions, not AWS + +**Recommendation:** Option B - GitHub Actions triggers ECS tasks directly. + +**Required Changes:** + +- Update `.github/workflows/load-test.yml` to: + - Use AWS SDK or `aws-actions/amazon-ecs-run-task` + - Pass environment variables to ECS task + - Wait for task completion + - Download results from S3 +- Add AWS credentials to GitHub Actions secrets +- Configure OIDC provider (if using OIDC instead of access keys) + +--- + +### 11. IAM Roles and Permissions + +**Required IAM Roles:** + +1. **ECS Task Role** (for load test tasks): + - Read from Secrets Manager + - Write to S3 bucket + - Write to CloudWatch Logs + - Minimum permissions principle + +2. 
**GitHub Actions Role** (if using AWS SDK): + - Run ECS tasks + - Read task status + - Read from S3 (for results) + - OIDC trust relationship (if using OIDC) + +**Required Setup:** + +- Create IAM roles with appropriate policies +- Configure trust relationships +- Set up OIDC provider (if using OIDC) + +--- + +### 12. Container Image Build Location + +**Question:** Where should Docker images be built? + +**Current:** GitHub Actions builds and pushes to GHCR + +**Options:** + +- **Keep in GitHub Actions**: Build in CI, push to registry + - Pros: CI validation, versioned images + - Cons: External to AWS +- **Build in AWS**: Use CodeBuild or ECR build service + - Pros: AWS-native, simpler permissions + - Cons: Less visibility in GitHub + +**Recommendation:** Build in GitHub Actions for CI validation, but this is optional. Can build on-demand in AWS if preferred. + +**Note:** Image versioning is not strictly necessary for load tests (git SHA in results provides versioning). + +--- + +## Required Code Changes (Post-DevOps Setup) + +### 1. AWS Secrets Manager Integration + +**File:** `apps/load-tests/src/config/environments.ts` + +Add `AwsSecretsManagerConfigProvider`: + +```typescript +async function getConfigFromAwsSecrets( + envName: EnvironmentName +): Promise { + // Implementation using @aws-sdk/client-secrets-manager +} +``` + +### 2. S3 Uploader Implementation + +**File:** `apps/load-tests/src/output/uploader.ts` + +Add `S3Uploader` class: + +```typescript +export class S3Uploader implements ResultUploader { + async upload(results: TestResults, options?: UploadOptions): Promise { + // Implementation using @aws-sdk/client-s3 + } +} +``` + +### 3. Update Uploader Selection + +**File:** `apps/load-tests/src/output/uploader.ts` + +Update `getUploader()` to detect AWS environment and return `S3Uploader`. + +### 4. 
GitHub Actions Workflow Updates + +**File:** `.github/workflows/load-test.yml` + +Replace direct execution with ECS task execution: + +- Use `aws-actions/amazon-ecs-run-task` or AWS SDK +- Pass environment variables to task +- Wait for completion +- Download results from S3 + +--- + +## Dependencies to Add (Post-DevOps) + +```json +{ + "dependencies": { + "@aws-sdk/client-s3": "^3.x.x", + "@aws-sdk/client-secrets-manager": "^3.x.x", + "@aws-sdk/client-ecs": "^3.x.x" // Optional, for advanced features + } +} +``` + +--- + +## Testing Strategy + +1. **Local Testing**: ✅ Complete (all functionality tested) +2. **Docker Testing**: ✅ Complete (container builds and runs) +3. **AWS Integration Testing**: + - Test Secrets Manager integration + - Test S3 upload + - Test ECS task execution + - Test CloudWatch logging + - Test production safety mechanisms + +--- + +## Next Steps + +1. **DevOps Setup:** + - Answer open questions above + - Set up AWS infrastructure + - Configure IAM roles and permissions + - Set up Secrets Manager secrets + - Create S3 bucket + +2. **Code Integration:** + - Implement AWS Secrets Manager integration + - Implement S3 uploader + - Update GitHub Actions workflow + - Add AWS SDK dependencies + +3. **Testing:** + - Test end-to-end AWS flow + - Verify production safety + - Test cost monitoring + +4. **Documentation:** + - Update README with AWS setup instructions + - Document AWS-specific configuration + - Document troubleshooting for AWS issues + +--- + +## Questions for DevOps Team + +1. Which AWS account should host this infrastructure? +2. ECS Fargate, EC2, or Lambda for load generators? +3. ECR or GHCR for container registry? +4. What VPC/network configuration is needed? +5. What are the cost expectations/budgets? +6. What monitoring/alerting requirements? +7. Should we use OIDC or access keys for GitHub Actions? +8. What are the production safety requirements beyond manual approval? 
diff --git a/apps/load-tests/Dockerfile b/apps/load-tests/Dockerfile new file mode 100644 index 0000000..7dd7bc1 --- /dev/null +++ b/apps/load-tests/Dockerfile @@ -0,0 +1,50 @@ +# Build from monorepo root context +# Usage: docker build -f apps/load-tests/Dockerfile . +FROM node:20-alpine + +# Set working directory +WORKDIR /app + +# Copy root package files for yarn workspace setup +COPY package.json yarn.lock ./ + +# Copy tsconfig.base.json (needed for TypeScript compilation) +COPY tsconfig.base.json ./ + +# Enable corepack for Yarn 4 +RUN corepack enable + +# Copy workspace package.json files in a way that preserves structure +# First create directories +RUN mkdir -p packages/core packages/dapp-client packages/wallet-client \ + apps/load-tests apps/web-demo apps/rn-demo apps/integration-tests + +# Copy all package.json files (needed for yarn workspace resolution) +COPY packages/core/package.json ./packages/core/ +COPY packages/dapp-client/package.json ./packages/dapp-client/ +COPY packages/wallet-client/package.json ./packages/wallet-client/ +COPY apps/load-tests/package.json ./apps/load-tests/ +COPY apps/web-demo/package.json ./apps/web-demo/ +COPY apps/rn-demo/package.json ./apps/rn-demo/ +COPY apps/integration-tests/package.json ./apps/integration-tests/ + +# Copy load-tests tsconfig +COPY apps/load-tests/tsconfig.json ./apps/load-tests/ + +# Install dependencies (without frozen lockfile for Docker build) +RUN yarn install + +# Copy load-tests source code +COPY apps/load-tests/src ./apps/load-tests/src +# Copy config directory (if it doesn't exist, create empty one) +COPY apps/load-tests/config ./apps/load-tests/config + +# Set working directory to load-tests app +WORKDIR /app/apps/load-tests + +# Set entrypoint to load test CLI +# Use yarn start which will use tsx from node_modules +ENTRYPOINT ["yarn", "start"] + +# Default command (can be overridden) +CMD ["--help"] diff --git a/apps/load-tests/README.md b/apps/load-tests/README.md new file mode 100644 index 
0000000..e683913 --- /dev/null +++ b/apps/load-tests/README.md @@ -0,0 +1,292 @@ +# Load Testing + +Load testing infrastructure for the Mobile Wallet Protocol relay server. This tool runs various load test scenarios against Centrifugo relay servers to measure performance, stability, and scalability. + +## Features + +- **Multiple Test Scenarios**: Connection storm and steady-state testing +- **Environment Configuration**: Support for dev, UAT, and production environments +- **Docker Support**: Containerized execution for consistent testing +- **GitHub Actions Integration**: Automated workflow for running tests +- **Result Collection**: Automatic result storage and metadata collection + +## Prerequisites + +- Node.js 20.x or later +- Yarn package manager +- Docker (for containerized testing) +- Access to a Centrifugo relay server for testing + +## Installation + +From the repository root: + +```bash +yarn install +``` + +## Configuration + +### Environment Variables + +The load test runner supports environment-based configuration via environment variables: + +```bash +export RELAY_URL_DEV=ws://localhost:8000/connection/websocket +export RELAY_URL_UAT=wss://uat-relay.example.com/connection/websocket +export RELAY_URL_PROD=wss://prod-relay.example.com/connection/websocket +``` + +### Configuration File + +Alternatively, create a `config/environments.json` file: + +```json +{ + "dev": { + "relayUrl": "ws://localhost:8000/connection/websocket" + }, + "uat": { + "relayUrl": "wss://uat-relay.example.com/connection/websocket" + }, + "prod": { + "relayUrl": "wss://prod-relay.example.com/connection/websocket" + } +} +``` + +See `config/environments.example.json` for a template. + +**Note**: Production URLs should be stored in AWS Secrets Manager (after DevOps setup). For now, use environment variables or config files for local testing. 
+ +## Usage + +### Direct CLI Usage + +Run load tests directly using the CLI: + +```bash +cd apps/load-tests + +# Using environment configuration +yarn start --environment dev --scenario connection-storm --connections 100 + +# Using explicit target URL +yarn start --target ws://localhost:8000/connection/websocket --scenario steady-state --connections 50 --duration 60 +``` + +### CLI Options + +- `--environment `: Environment name (dev, uat, prod). Resolves relay URL from config. +- `--target `: Explicit WebSocket URL (required if --environment not provided) +- `--scenario `: Test scenario (connection-storm, steady-state). Default: connection-storm +- `--connections `: Number of connections to create. Default: 100 +- `--duration `: Test duration in seconds (for steady-state). Default: 60 +- `--ramp-up `: Seconds to ramp up to full connection count. Default: 10 +- `--output `: Path to write JSON results file + +### Docker Usage + +Build the Docker image: + +```bash +# From repository root +yarn workspace @metamask/mobile-wallet-protocol-load-tests docker:build +``` + +Run the container: + +```bash +# Test with environment variable +docker run --rm \ + -e RELAY_URL_DEV=ws://host.docker.internal:8000 \ + load-test:local \ + --environment dev \ + --scenario connection-storm \ + --connections 10 + +# Mount results directory +docker run --rm \ + -e RELAY_URL_DEV=ws://host.docker.internal:8000 \ + -v $(pwd)/results:/app/apps/load-tests/results \ + load-test:local \ + --environment dev \ + --scenario connection-storm \ + --output results/test.json +``` + +**Note**: Use `host.docker.internal` to access the host machine's relay server from within Docker. 
+ +### Local Workflow Testing + +Use the test script to simulate the GitHub Actions workflow locally: + +```bash +cd apps/load-tests + +# Set environment variables +export RELAY_URL_DEV=ws://localhost:8000 + +# Run test script +./scripts/test-local-workflow.sh \ + --environment dev \ + --scenario connection-storm \ + --connections 10 \ + --duration 30 \ + --ramp-up 5 +``` + +## Test Scenarios + +### Connection Storm + +Tests the system's ability to handle a rapid burst of connections: + +- Creates connections as quickly as possible +- Measures connection success rate +- Tracks retry attempts +- Calculates connection latency + +### Steady State + +Tests long-term connection stability: + +- Ramps up connections gradually +- Holds connections for a specified duration +- Monitors disconnections and reconnections +- Calculates connection stability metrics + +## Results + +Test results are saved as JSON files with the following structure: + +```json +{ + "scenario": "connection-storm", + "timestamp": "2024-01-01T12:00:00.000Z", + "target": "ws://localhost:8000", + "environment": "dev", + "gitSha": "abc123...", + "runnerType": "local", + "config": { + "connections": 100, + "durationSec": 60, + "rampUpSec": 10 + }, + "results": { + "connections": { + "attempted": 100, + "successful": 95, + "failed": 5, + "successRate": 95.0, + "immediate": 90, + "recovered": 5 + }, + "timing": { + "totalTimeMs": 5000, + "connectionsPerSec": 20.0 + }, + "latency": { + "min": 10, + "max": 500, + "avg": 100, + "p95": 250 + } + } +} +``` + +### Result Metadata + +- `environment`: Environment name (dev, uat, prod) +- `gitSha`: Git commit SHA (if available) +- `runnerType`: Type of runner (local, docker, aws) +- `containerId`: Container or task ID (if running in container) + +## GitHub Actions Workflow + +The load test can be triggered via GitHub Actions workflow: + +1. Go to **Actions** → **Load Test** +2. Click **Run workflow** +3. 
Select: + - Environment (dev, uat, prod) + - Scenario (connection-storm, steady-state) + - Optional: connections, duration, ramp-up +4. For production, manual approval is required + +Results are uploaded as workflow artifacts. + +## Development + +### Running Tests Locally + +1. Start the relay server: + + ```bash + docker compose -f backend/docker-compose.yml up -d + ``` + +2. Set environment variable: + + ```bash + export RELAY_URL_DEV=ws://localhost:8000 + ``` + +3. Run a quick test: + ```bash + cd apps/load-tests + yarn start --environment dev --scenario connection-storm --connections 10 + ``` + +### Adding New Scenarios + +1. Create a new scenario file in `src/scenarios/` +2. Implement the scenario function following the `ScenarioResult` interface +3. Add the scenario to `src/scenarios/index.ts` +4. Update the CLI to include the new scenario + +## AWS Integration (Post-DevOps) + +After DevOps sets up AWS infrastructure, the following will be available: + +- **AWS Secrets Manager**: Environment URLs stored securely +- **S3**: Automatic result upload and storage +- **ECS/EC2/Lambda**: Container orchestration for distributed testing +- **CloudWatch**: Logging and metrics + +The code is designed with abstraction layers that allow swapping AWS implementations without changing core logic. 
+ +## Troubleshooting + +### Environment Not Found + +If you see "Environment 'dev' not configured": + +- Set the appropriate environment variable: `RELAY_URL_DEV`, `RELAY_URL_UAT`, or `RELAY_URL_PROD` +- Or create `config/environments.json` file + +### Docker Can't Connect to Host + +When running Docker, use `host.docker.internal` instead of `localhost`: + +```bash +-e RELAY_URL_DEV=ws://host.docker.internal:8000 +``` + +### Connection Failures + +- Verify the relay server is running and accessible +- Check network connectivity +- Verify the WebSocket URL format (ws:// for local, wss:// for remote) + +## Scripts + +- `yarn start`: Run load test CLI +- `yarn docker:build`: Build Docker image +- `yarn docker:run`: Run Docker container +- `yarn docker:test`: Quick Docker test + +## License + +See LICENSE file in the repository root. diff --git a/apps/load-tests/config/environments.example.json b/apps/load-tests/config/environments.example.json new file mode 100644 index 0000000..3bd5f44 --- /dev/null +++ b/apps/load-tests/config/environments.example.json @@ -0,0 +1,11 @@ +{ + "dev": { + "relayUrl": "ws://localhost:8000/connection/websocket" + }, + "uat": { + "relayUrl": "wss://uat-relay.example.com/connection/websocket" + }, + "prod": { + "relayUrl": "wss://prod-relay.example.com/connection/websocket" + } +} diff --git a/apps/load-tests/config/environments.json b/apps/load-tests/config/environments.json new file mode 100644 index 0000000..3bd5f44 --- /dev/null +++ b/apps/load-tests/config/environments.json @@ -0,0 +1,11 @@ +{ + "dev": { + "relayUrl": "ws://localhost:8000/connection/websocket" + }, + "uat": { + "relayUrl": "wss://uat-relay.example.com/connection/websocket" + }, + "prod": { + "relayUrl": "wss://prod-relay.example.com/connection/websocket" + } +} diff --git a/apps/load-tests/package.json b/apps/load-tests/package.json new file mode 100644 index 0000000..11693f2 --- /dev/null +++ b/apps/load-tests/package.json @@ -0,0 +1,31 @@ +{ + "name": 
"@metamask/mobile-wallet-protocol-load-tests", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "start": "tsx src/cli/run.ts", + "infra": "tsx src/cli/infra.ts", + "results": "tsx src/cli/results.ts", + "docker:build": "cd ../.. && docker build -t load-test:local -f apps/load-tests/Dockerfile .", + "docker:run": "docker run --rm load-test:local", + "docker:test": "docker run --rm -e RELAY_URL_DEV=ws://host.docker.internal:8000/connection/websocket load-test:local --environment dev --scenario connection-storm --connections 10" + }, + "dependencies": { + "centrifuge": "^5.3.5", + "chalk": "^5.6.2", + "cli-progress": "^3.12.0", + "commander": "^13.1.0", + "dotenv": "^16.5.0", + "ssh2": "^1.16.0", + "tsx": "^4.20.3", + "ws": "^8.18.3" + }, + "devDependencies": { + "@types/cli-progress": "^3.11.6", + "@types/node": "^24.0.3", + "@types/ssh2": "^1.15.4", + "@types/ws": "^8.18.1", + "typescript": "^5.8.3" + } +} diff --git a/apps/load-tests/results/.gitkeep b/apps/load-tests/results/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/apps/load-tests/scripts/test-local-workflow.sh b/apps/load-tests/scripts/test-local-workflow.sh new file mode 100755 index 0000000..c799910 --- /dev/null +++ b/apps/load-tests/scripts/test-local-workflow.sh @@ -0,0 +1,80 @@ +#!/bin/bash +# Local workflow testing script +# Simulates the GitHub Actions workflow locally + +set -e + +echo "╔══════════════════════════════════════╗" +echo "║ LOCAL WORKFLOW TESTING SCRIPT ║" +echo "╚══════════════════════════════════════╝" +echo "" + +# Default values +ENVIRONMENT="${ENVIRONMENT:-dev}" +SCENARIO="${SCENARIO:-connection-storm}" +CONNECTIONS="${CONNECTIONS:-10}" +DURATION="${DURATION:-30}" +RAMP_UP="${RAMP_UP:-5}" + +# Parse command line arguments +while [[ $# -gt 0 ]]; do + case $1 in + --environment) + ENVIRONMENT="$2" + shift 2 + ;; + --scenario) + SCENARIO="$2" + shift 2 + ;; + --connections) + CONNECTIONS="$2" + shift 2 + ;; + --duration) + DURATION="$2" 
+      shift 2 + ;; + --ramp-up) + RAMP_UP="$2" + shift 2 + ;; + *) + echo "Unknown option: $1" + echo "Usage: $0 [--environment ENV] [--scenario SCENARIO] [--connections N] [--duration SEC] [--ramp-up SEC]" + exit 1 + ;; + esac +done + +echo "Configuration:" +echo " Environment: $ENVIRONMENT" +echo " Scenario: $SCENARIO" +echo " Connections: $CONNECTIONS" +echo " Duration: ${DURATION}s" +echo " Ramp-up: ${RAMP_UP}s" +echo "" + +# Check if environment variables are set +RELAY_URL_VAR="RELAY_URL_$(echo "$ENVIRONMENT" | tr '[:lower:]' '[:upper:]')" +if [ -z "${!RELAY_URL_VAR}" ]; then + echo "⚠️ Warning: $RELAY_URL_VAR not set" + echo " Set it with: export $RELAY_URL_VAR=ws://localhost:8000/connection/websocket" + echo "" +fi + +# Run the load test +echo "Running load test..." +cd "$(dirname "$0")/.." + +yarn start \ + --environment "$ENVIRONMENT" \ + --scenario "$SCENARIO" \ + --connections "$CONNECTIONS" \ + --duration "$DURATION" \ + --ramp-up "$RAMP_UP" \ + --output "results/local-test-$(date +%Y%m%d-%H%M%S).json" + +echo "" +echo "✓ Local workflow test complete!" 
+echo " Results saved to: results/"
diff --git a/apps/load-tests/src/cli/infra.ts b/apps/load-tests/src/cli/infra.ts
new file mode 100644
index 0000000..9d94818
--- /dev/null
+++ b/apps/load-tests/src/cli/infra.ts
@@ -0,0 +1,72 @@
+#!/usr/bin/env node
+import { Command } from "commander";
+
+const program = new Command();
+
+program
+  .name("infra")
+  .description("Manage DigitalOcean infrastructure for distributed load testing")
+  .version("0.0.1");
+
+program
+  .command("create")
+  .description("Create DigitalOcean droplets for load testing")
+  .option("--count <count>", "Number of droplets to create", "3")
+  .option("--region <region>", "DigitalOcean region", "nyc1")
+  .option("--size <size>", "Droplet size", "s-2vcpu-4gb")
+  .option("--name-prefix <prefix>", "Prefix for droplet names", "load-test")
+  .action((options) => {
+    console.log("[infra] Create droplets");
+    console.log(` Count: ${options.count}`);
+    console.log(` Region: ${options.region}`);
+    console.log(` Size: ${options.size}`);
+    console.log(` Prefix: ${options.namePrefix}`);
+    // TODO: Implement droplet creation
+  });
+
+program
+  .command("list")
+  .description("List current load test droplets")
+  .action(() => {
+    console.log("[infra] No droplets found");
+    // TODO: Implement droplet listing
+  });
+
+program
+  .command("destroy")
+  .description("Destroy all load test droplets")
+  .action(() => {
+    console.log("[infra] No droplets to destroy");
+    // TODO: Implement droplet destruction
+  });
+
+program
+  .command("update")
+  .description("Update code on all droplets (git pull && yarn build)")
+  .action(() => {
+    console.log("[infra] No droplets to update");
+    // TODO: Implement droplet update
+  });
+
+program
+  .command("exec")
+  .description("Execute a command on all droplets")
+  .requiredOption("--command <command>", "Command to execute")
+  .action((options) => {
+    console.log("[infra] No droplets to execute command on");
+    console.log(` Command: ${options.command}`);
+    // TODO: Implement command execution
+  });
+
+program
+  .command("collect")
+  .description("Collect results from all droplets")
+  .requiredOption("--output <dir>", "Directory to store collected results")
+  .action((options) => {
+    console.log("[infra] No droplets to collect from");
+    console.log(` Output: ${options.output}`);
+    // TODO: Implement results collection
+  });
+
+program.parse();
+
diff --git a/apps/load-tests/src/cli/results.ts b/apps/load-tests/src/cli/results.ts
new file mode 100644
index 0000000..a3d42bb
--- /dev/null
+++ b/apps/load-tests/src/cli/results.ts
@@ -0,0 +1,21 @@
+#!/usr/bin/env node
+import { Command } from "commander";
+
+const program = new Command();
+
+program
+  .name("results")
+  .description("Process and aggregate load test results")
+  .version("0.0.1");
+
+program
+  .command("aggregate")
+  .description("Aggregate results from multiple load test runs")
+  .requiredOption("--input <dir>", "Directory containing result JSON files")
+  .action((options) => {
+    console.log("[results] Aggregate results");
+    console.log(` Input: ${options.input}`);
+    // TODO: Implement results aggregation
+  });
+
+program.parse();
diff --git a/apps/load-tests/src/cli/run.ts b/apps/load-tests/src/cli/run.ts
new file mode 100644
index 0000000..80b1900
--- /dev/null
+++ b/apps/load-tests/src/cli/run.ts
@@ -0,0 +1,235 @@
+#!/usr/bin/env node
+import chalk from "chalk";
+import { Command } from "commander";
+import {
+  getCurrentEnvironment,
+  getEnvironmentConfig,
+  isValidEnvironmentName,
+} from "../config/environments.js";
+import { printResults } from "../output/formatter.js";
+import type { TestResults } from "../output/types.js";
+import { getUploader } from "../output/uploader.js";
+import {
+  isValidScenarioName,
+  runScenario,
+  type ScenarioOptions,
+  type ScenarioResult,
+} from "../scenarios/index.js";
+import { calculateLatencyStats } from "../utils/stats.js";
+import { collectMetadata } from "../utils/metadata.js";
+
+/**
+ * CLI options as parsed by commander (strings).
+ */ +interface CliOptions { + target?: string; + environment?: string; + scenario: string; + connections: string; + duration: string; + rampUp: string; + output?: string; +} + +/** + * Parse CLI options into ScenarioOptions (with proper types). + */ +function parseOptions(cli: CliOptions): ScenarioOptions { + return { + target: cli.target, + connections: Number.parseInt(cli.connections, 10), + durationSec: Number.parseInt(cli.duration, 10), + rampUpSec: Number.parseInt(cli.rampUp, 10), + }; +} + +/** + * Transform ScenarioResult into TestResults for output. + */ +function buildTestResults( + scenarioName: string, + options: ScenarioOptions, + result: ScenarioResult, + metadata: { environment?: string; gitSha?: string; runnerType: string; containerId?: string }, +): TestResults { + const { connections } = result; + + return { + scenario: scenarioName, + timestamp: new Date().toISOString(), + target: options.target, + environment: metadata.environment, + gitSha: metadata.gitSha, + runnerType: metadata.runnerType, + containerId: metadata.containerId, + config: { + connections: options.connections, + durationSec: options.durationSec, + rampUpSec: options.rampUpSec, + }, + results: { + connections: { + attempted: connections.attempted, + successful: connections.successful, + failed: connections.failed, + successRate: + connections.attempted > 0 + ? (connections.successful / connections.attempted) * 100 + : 0, + immediate: connections.immediate, + recovered: connections.recovered, + }, + timing: { + totalTimeMs: result.timing.totalTimeMs, + connectionsPerSec: + result.timing.totalTimeMs > 0 + ? (connections.attempted / result.timing.totalTimeMs) * 1000 + : 0, + }, + latency: calculateLatencyStats(result.timing.connectionLatencies), + retries: { + totalRetries: result.retries.totalRetries, + avgRetriesPerConnection: + connections.attempted > 0 + ? result.retries.totalRetries / connections.attempted + : 0, + }, + steadyState: result.steadyState + ? 
{
+            holdDurationMs: result.steadyState.holdDurationMs,
+            currentDisconnects: result.steadyState.currentDisconnects,
+            peakDisconnects: result.steadyState.peakDisconnects,
+            reconnectsDuringHold: result.steadyState.reconnectsDuringHold,
+            connectionStability: result.steadyState.connectionStability,
+          }
+        : undefined,
+    },
+  };
+}
+
+const program = new Command();
+
+program
+  .name("start")
+  .description("Run load tests against a Centrifugo relay server")
+  .version("0.0.1")
+  .option("--target <url>", "WebSocket URL of the relay server (required if --environment not provided)")
+  .option(
+    "--environment <env>",
+    "Environment name: dev, uat, prod (resolves relay URL from config)",
+  )
+  .option(
+    "--scenario <scenario>",
+    "Scenario to run: connection-storm, steady-state",
+    "connection-storm",
+  )
+  .option("--connections <count>", "Number of connections to create", "100")
+  .option(
+    "--duration <seconds>",
+    "Test duration in seconds (for steady-state)",
+    "60",
+  )
+  .option(
+    "--ramp-up <seconds>",
+    "Seconds to ramp up to full connection count",
+    "10",
+  )
+  .option("--output <path>", "Path to write JSON results")
+  .action(async (cli: CliOptions) => {
+    // Determine environment
+    let environment: string | undefined;
+    let targetUrl: string;
+
+    if (cli.environment) {
+      // Validate environment name
+      if (!isValidEnvironmentName(cli.environment)) {
+        console.error(chalk.red(`[load-test] Invalid environment: ${cli.environment}`));
+        console.error(chalk.yellow("[load-test] Valid environments: dev, uat, prod"));
+        process.exit(1);
+      }
+
+      // Get environment config
+      const envConfig = getEnvironmentConfig(cli.environment);
+      if (!envConfig) {
+        console.error(chalk.red(`[load-test] Environment '${cli.environment}' not configured`));
+        console.error(chalk.yellow("[load-test] Set RELAY_URL_DEV, RELAY_URL_UAT, or RELAY_URL_PROD environment variable"));
+        console.error(chalk.yellow(" or create config/environments.json file"));
+        process.exit(1);
+      }
+
+      environment = cli.environment;
+      targetUrl =
envConfig.relayUrl; + + // Warn if --target was also provided + if (cli.target) { + console.warn(chalk.yellow(`[load-test] Warning: --target ignored when using --environment`)); + } + } else if (cli.target) { + // Use explicit target + targetUrl = cli.target; + // Try to detect environment from LOAD_TEST_ENVIRONMENT + const currentEnv = getCurrentEnvironment(); + if (currentEnv) { + environment = currentEnv; + } + } else { + console.error(chalk.red("[load-test] Either --target or --environment must be provided")); + process.exit(1); + } + + // Validate scenario name + if (!isValidScenarioName(cli.scenario)) { + console.error(chalk.red(`[load-test] Unknown scenario: ${cli.scenario}`)); + console.error(chalk.yellow("[load-test] Available scenarios: connection-storm, steady-state")); + process.exit(1); + } + + // Parse options + const options = parseOptions({ ...cli, target: targetUrl }); + + // Collect metadata + const metadata = collectMetadata(environment); + + // Print configuration + console.log(chalk.bold.blue("╔══════════════════════════════════════╗")); + console.log(chalk.bold.blue("║ LOAD TEST RUNNER ║")); + console.log(chalk.bold.blue("╚══════════════════════════════════════╝")); + console.log(""); + console.log(chalk.bold("Configuration:")); + if (environment) { + console.log(` Environment: ${chalk.cyan(environment)}`); + } + console.log(` Target: ${chalk.dim(options.target)}`); + console.log(` Scenario: ${chalk.cyan(cli.scenario)}`); + console.log(` Connections: ${chalk.bold(options.connections)}`); + console.log(` Duration: ${options.durationSec}s`); + console.log(` Ramp-up: ${options.rampUpSec}s`); + console.log(` Runner: ${chalk.dim(metadata.runnerType)}`); + if (metadata.containerId) { + console.log(` Container: ${chalk.dim(metadata.containerId)}`); + } + if (cli.output) { + console.log(` Output: ${chalk.dim(cli.output)}`); + } + console.log(""); + + // Run scenario + const result = await runScenario(cli.scenario, options); + + // Build and display 
results + const testResults = buildTestResults(cli.scenario, options, result, metadata); + + console.log(""); + printResults(testResults); + + if (cli.output) { + console.log(""); + const uploader = getUploader(); + await uploader.upload(testResults, { path: cli.output }); + } + + console.log(""); + console.log(chalk.green("✓ Done")); + }); + +program.parse(); diff --git a/apps/load-tests/src/client/centrifuge-client.ts b/apps/load-tests/src/client/centrifuge-client.ts new file mode 100644 index 0000000..24263ea --- /dev/null +++ b/apps/load-tests/src/client/centrifuge-client.ts @@ -0,0 +1,112 @@ +import { Centrifuge } from "centrifuge"; +import WebSocket from "ws"; + +/** + * Connection outcome types: + * - immediate: Connected on first try + * - recovered: Failed initially but reconnected successfully + * - failed: Could not connect after all retries + */ +export type ConnectionOutcome = "immediate" | "recovered" | "failed"; + +export interface ConnectionResult { + success: boolean; + outcome: ConnectionOutcome; + connectionTimeMs: number; + retryCount: number; + error?: string; +} + +export interface CentrifugeClientOptions { + url: string; + timeoutMs?: number; + minReconnectDelay?: number; + maxReconnectDelay?: number; +} + +/** + * Wrapper around the Centrifuge client for load testing. + * Connects to a Centrifugo server and measures connection time. + * Supports automatic reconnection with tracking of outcomes. + */ +export class CentrifugeClient { + private client: Centrifuge | null = null; + private readonly url: string; + private readonly timeoutMs: number; + private readonly minReconnectDelay: number; + private readonly maxReconnectDelay: number; + + constructor(options: CentrifugeClientOptions) { + this.url = options.url; + this.timeoutMs = options.timeoutMs ?? 30000; + this.minReconnectDelay = options.minReconnectDelay ?? 500; + this.maxReconnectDelay = options.maxReconnectDelay ?? 5000; + } + + /** + * Connect to the Centrifugo server. 
+   * Returns connection timing, outcome, and retry info.
+   * Will wait for reconnection if initial connection fails.
+   */
+  async connect(): Promise<ConnectionResult> {
+    const startTime = performance.now();
+    let retryCount = 0;
+    let hadError = false;
+
+    return new Promise((resolve) => {
+      const timeout = setTimeout(() => {
+        this.disconnect();
+        resolve({
+          success: false,
+          outcome: "failed",
+          connectionTimeMs: performance.now() - startTime,
+          retryCount,
+          error: `Connection timeout after ${this.timeoutMs}ms`,
+        });
+      }, this.timeoutMs);
+
+      this.client = new Centrifuge(this.url, {
+        websocket: WebSocket,
+        minReconnectDelay: this.minReconnectDelay,
+        maxReconnectDelay: this.maxReconnectDelay,
+        timeout: 10000,
+      });
+
+      this.client.on("connected", () => {
+        clearTimeout(timeout);
+        resolve({
+          success: true,
+          outcome: hadError ? "recovered" : "immediate",
+          connectionTimeMs: performance.now() - startTime,
+          retryCount,
+        });
+      });
+
+      // Track errors but don't resolve - let it retry
+      this.client.on("error", () => {
+        hadError = true;
+        retryCount++;
+      });
+
+      this.client.connect();
+    });
+  }
+
+  /**
+   * Disconnect from the server.
+   */
+  disconnect(): void {
+    if (this.client) {
+      this.client.disconnect();
+      this.client = null;
+    }
+  }
+
+  /**
+   * Check if currently connected.
+   */
+  isConnected(): boolean {
+    return this.client?.state === "connected";
+  }
+}
+
diff --git a/apps/load-tests/src/config/environments.ts b/apps/load-tests/src/config/environments.ts
new file mode 100644
index 0000000..0a719b5
--- /dev/null
+++ b/apps/load-tests/src/config/environments.ts
@@ -0,0 +1,112 @@
+import * as fs from "node:fs";
+import * as path from "node:path";
+import { fileURLToPath } from "node:url";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+/**
+ * Environment name type.
+ */
+export type EnvironmentName = "dev" | "uat" | "prod";
+
+/**
+ * Environment configuration.
+ */
+export interface EnvironmentConfig {
+  name: EnvironmentName;
+  relayUrl: string;
+}
+
+/**
+ * Get environment configuration from environment variables.
+ * Supports RELAY_URL_DEV, RELAY_URL_UAT, RELAY_URL_PROD.
+ */
+function getConfigFromEnvVars(envName: EnvironmentName): EnvironmentConfig | null {
+  const envVarName = `RELAY_URL_${envName.toUpperCase()}`;
+  const relayUrl = process.env[envVarName];
+
+  if (!relayUrl) {
+    return null;
+  }
+
+  return {
+    name: envName,
+    relayUrl,
+  };
+}
+
+/**
+ * Get environment configuration from config file.
+ * Looks for config/environments.json in the load-tests directory.
+ */
+function getConfigFromFile(envName: EnvironmentName): EnvironmentConfig | null {
+  const configPath = path.join(__dirname, "../../config/environments.json");
+
+  if (!fs.existsSync(configPath)) {
+    return null;
+  }
+
+  try {
+    const configContent = fs.readFileSync(configPath, "utf-8");
+    const config = JSON.parse(configContent) as Record<string, { relayUrl?: string }>;
+
+    const envConfig = config[envName];
+    if (!envConfig || !envConfig.relayUrl) {
+      return null;
+    }
+
+    return {
+      name: envName,
+      relayUrl: envConfig.relayUrl,
+    };
+  } catch (error) {
+    console.warn(`[load-test] Failed to read config file: ${error}`);
+    return null;
+  }
+}
+
+/**
+ * Get environment configuration for the specified environment.
+ * Checks in order:
+ * 1. Environment variables (RELAY_URL_DEV, RELAY_URL_UAT, RELAY_URL_PROD)
+ * 2.
Config file (config/environments.json) + * + * @param envName - Environment name (dev, uat, prod) + * @returns Environment configuration or null if not found + */ +export function getEnvironmentConfig(envName: string): EnvironmentConfig | null { + if (envName !== "dev" && envName !== "uat" && envName !== "prod") { + return null; + } + + const normalizedEnv = envName as EnvironmentName; + + // Try environment variables first + const envConfig = getConfigFromEnvVars(normalizedEnv); + if (envConfig) { + return envConfig; + } + + // Fall back to config file + return getConfigFromFile(normalizedEnv); +} + +/** + * Get the current environment from LOAD_TEST_ENVIRONMENT env var. + * Falls back to null if not set. + */ +export function getCurrentEnvironment(): EnvironmentName | null { + const env = process.env.LOAD_TEST_ENVIRONMENT; + if (env === "dev" || env === "uat" || env === "prod") { + return env; + } + return null; +} + +/** + * Validate that an environment name is valid. + */ +export function isValidEnvironmentName(name: string): name is EnvironmentName { + return name === "dev" || name === "uat" || name === "prod"; +} diff --git a/apps/load-tests/src/output/formatter.ts b/apps/load-tests/src/output/formatter.ts new file mode 100644 index 0000000..0da42ca --- /dev/null +++ b/apps/load-tests/src/output/formatter.ts @@ -0,0 +1,66 @@ +import chalk from "chalk"; +import type { TestResults } from "./types.js"; + +/** + * Print test results summary to console. + */ +export function printResults(results: TestResults): void { + const { connections, timing, latency, retries, steadyState } = results.results; + + console.log(chalk.gray("─────────────────────────────────────")); + console.log(chalk.bold(" RESULTS SUMMARY")); + console.log(chalk.gray("─────────────────────────────────────")); + + // Connection summary with color-coded success rate + const successRate = connections.successRate; + const rateColor = successRate >= 99 ? chalk.green : successRate >= 95 ? 
chalk.yellow : chalk.red; + console.log( + `Connections: ${connections.attempted} attempted, ${connections.successful} successful (${rateColor(successRate.toFixed(1) + "%")})`, + ); + + // Breakdown with icons + console.log( + ` ${chalk.green("✓")} Immediate: ${connections.immediate} | ${chalk.yellow("↻")} Recovered: ${connections.recovered} | ${chalk.red("✗")} Failed: ${connections.failed}`, + ); + + // Timing + console.log(`Total time: ${Math.round(timing.totalTimeMs)}ms`); + console.log(`Rate: ${timing.connectionsPerSec.toFixed(1)} conn/sec`); + + // Latency with color-coded p95 + if (latency) { + const p95Color = latency.p95 <= 100 ? chalk.green : latency.p95 <= 400 ? chalk.yellow : chalk.red; + console.log( + `Latency: min=${latency.min}ms, max=${latency.max}ms, avg=${latency.avg}ms, p95=${p95Color(latency.p95 + "ms")}`, + ); + } + + // Retries (only if any) + if (retries.totalRetries > 0) { + console.log( + chalk.yellow(`Retries: ${retries.totalRetries} total (avg ${retries.avgRetriesPerConnection.toFixed(1)} per conn)`), + ); + } + + // Steady-state specific metrics + if (steadyState) { + console.log(`Hold: ${Math.round(steadyState.holdDurationMs / 1000)}s`); + + const disconnectColor = steadyState.currentDisconnects === 0 ? chalk.green : chalk.red; + console.log( + `Disconnects: ${disconnectColor(steadyState.currentDisconnects.toString())} current, ${steadyState.peakDisconnects} peak`, + ); + + if (steadyState.reconnectsDuringHold > 0) { + console.log(chalk.yellow(`Reconnects: ${steadyState.reconnectsDuringHold} during hold`)); + } + + const stabilityColor = + steadyState.connectionStability >= 99.9 + ? chalk.green + : steadyState.connectionStability >= 99 + ? 
chalk.yellow + : chalk.red; + console.log(`Stability: ${stabilityColor(steadyState.connectionStability.toFixed(1) + "%")}`); + } +} diff --git a/apps/load-tests/src/output/types.ts b/apps/load-tests/src/output/types.ts new file mode 100644 index 0000000..cacd6e5 --- /dev/null +++ b/apps/load-tests/src/output/types.ts @@ -0,0 +1,58 @@ +/** + * Complete test results structure for JSON output. + * This is the final output format - built from ScenarioResult in the CLI. + */ +export interface TestResults { + scenario: string; + timestamp: string; + target: string; + /** Environment name (dev, uat, prod) */ + environment?: string; + /** Git commit SHA if available */ + gitSha?: string; + /** Type of runner (local, docker, aws) */ + runnerType?: string; + /** Container or task ID if running in containerized environment */ + containerId?: string; + config: { + connections: number; + durationSec: number; + rampUpSec: number; + }; + results: { + connections: { + attempted: number; + successful: number; + failed: number; + successRate: number; + /** Connected on first try */ + immediate: number; + /** Failed initially but recovered via reconnect */ + recovered: number; + }; + timing: { + totalTimeMs: number; + connectionsPerSec: number; + }; + latency: { + min: number; + max: number; + avg: number; + p95: number; + } | null; + retries: { + totalRetries: number; + avgRetriesPerConnection: number; + }; + steadyState?: { + holdDurationMs: number; + /** Current number of disconnected clients at end of hold */ + currentDisconnects: number; + /** Peak number of disconnects seen at any point during hold */ + peakDisconnects: number; + /** Number of times clients reconnected during hold */ + reconnectsDuringHold: number; + connectionStability: number; + }; + }; +} diff --git a/apps/load-tests/src/output/uploader.ts b/apps/load-tests/src/output/uploader.ts new file mode 100644 index 0000000..0a2c8bf --- /dev/null +++ b/apps/load-tests/src/output/uploader.ts @@ -0,0 +1,55 @@ +import 
type { TestResults } from "./types.js";
+
+/**
+ * Interface for uploading test results to various destinations.
+ * This abstraction allows swapping implementations (local file system, S3, etc.)
+ * without changing the core logic.
+ */
+export interface ResultUploader {
+  /**
+   * Upload test results to the destination.
+   * @param results - Test results to upload
+   * @param options - Upload options (e.g., path, key, etc.)
+   * @returns Path or identifier where results were uploaded
+   */
+  upload(results: TestResults, options?: UploadOptions): Promise<string>;
+}
+
+/**
+ * Options for uploading results.
+ */
+export interface UploadOptions {
+  /**
+   * Path or key for the results.
+   * For local file system: file path
+   * For S3: object key
+   */
+  path?: string;
+}
+
+/**
+ * Local file system uploader.
+ * Writes results to a local file path.
+ */
+export class LocalFileUploader implements ResultUploader {
+  async upload(results: TestResults, options?: UploadOptions): Promise<string> {
+    if (!options?.path) {
+      throw new Error("LocalFileUploader requires a path option");
+    }
+
+    const { writeResults } = await import("./writer.js");
+    writeResults(options.path, results);
+    return options.path;
+  }
+}
+
+/**
+ * Get the appropriate uploader based on environment.
+ * Currently only supports local file system.
+ * Future: can detect AWS environment and return S3Uploader.
+ */
+export function getUploader(): ResultUploader {
+  // Check if we're running in AWS (future implementation)
+  // For now, always use local file system
+  return new LocalFileUploader();
+}
diff --git a/apps/load-tests/src/output/writer.ts b/apps/load-tests/src/output/writer.ts
new file mode 100644
index 0000000..5057a3e
--- /dev/null
+++ b/apps/load-tests/src/output/writer.ts
@@ -0,0 +1,16 @@
+import * as fs from "node:fs";
+import * as path from "node:path";
+import type { TestResults } from "./types.js";
+
+/**
+ * Write test results to a JSON file.
+ */
+export function writeResults(outputPath: string, results: TestResults): void {
+  const dir = path.dirname(outputPath);
+  if (dir && !fs.existsSync(dir)) {
+    fs.mkdirSync(dir, { recursive: true });
+  }
+  fs.writeFileSync(outputPath, JSON.stringify(results, null, 2));
+  console.log(`[load-test] Results written to ${outputPath}`);
+}
+
diff --git a/apps/load-tests/src/scenarios/connection-storm.ts b/apps/load-tests/src/scenarios/connection-storm.ts
new file mode 100644
index 0000000..f448f73
--- /dev/null
+++ b/apps/load-tests/src/scenarios/connection-storm.ts
@@ -0,0 +1,117 @@
+import chalk from "chalk";
+import {
+  CentrifugeClient,
+  type ConnectionResult,
+} from "../client/centrifuge-client.js";
+import {
+  createConnectionProgressBar,
+  startProgressBar,
+  stopProgressBar,
+  updateProgressBar,
+} from "../utils/progress.js";
+import { sleep } from "../utils/timing.js";
+import type { ScenarioOptions, ScenarioResult } from "./types.js";
+
+/**
+ * Connection storm scenario:
+ * Rapidly connect many clients with optional pacing, then disconnect.
+ * Tests raw connection handling capacity.
+ */
+export async function runConnectionStorm(
+  options: ScenarioOptions,
+): Promise<ScenarioResult> {
+  const { target, connections, rampUpSec } = options;
+
+  // Calculate pacing: spread connection starts over ramp-up period
+  const connectionDelay = rampUpSec > 0 ? (rampUpSec * 1000) / connections : 0;
+
+  console.log(`${chalk.cyan("[connection-storm]")} Connecting ${chalk.bold(connections)} client(s) to ${chalk.dim(target)}`);
+  if (connectionDelay > 0) {
+    console.log(
+      `${chalk.cyan("[connection-storm]")} Pacing: ${chalk.bold((1000 / connectionDelay).toFixed(1))} conn/sec over ${rampUpSec}s`,
+    );
+  }
+  console.log("");
+
+  const startTime = performance.now();
+  const clients: CentrifugeClient[] = [];
+  const connectionResults: ConnectionResult[] = [];
+
+  // Create progress bar
+  const progressBar = createConnectionProgressBar("[connection-storm]");
+  startProgressBar(progressBar, connections);
+
+  // Create and connect all clients with pacing
+  const connectPromises: Promise<void>[] = [];
+
+  for (let i = 0; i < connections; i++) {
+    const client = new CentrifugeClient({ url: target });
+    clients.push(client);
+
+    connectPromises.push(
+      client.connect().then((result) => {
+        connectionResults.push(result);
+        const immediate = connectionResults.filter((r) => r.outcome === "immediate").length;
+        const recovered = connectionResults.filter((r) => r.outcome === "recovered").length;
+        const failed = connectionResults.filter((r) => r.outcome === "failed").length;
+        updateProgressBar(progressBar, connectionResults.length, { immediate, recovered, failed });
+      }),
+    );
+
+    // Pace connection starts (but don't wait for connection to complete)
+    if (i < connections - 1 && connectionDelay > 0) {
+      await sleep(connectionDelay);
+    }
+  }
+
+  await Promise.all(connectPromises);
+  stopProgressBar(progressBar);
+
+  const totalTime = performance.now() - startTime;
+
+  console.log("");
+
+  const immediate = connectionResults.filter((r) => r.outcome === "immediate");
+  const recovered = connectionResults.filter((r) => r.outcome === "recovered");
+  const failed = connectionResults.filter((r) => r.outcome === "failed");
+  const successful = connectionResults.filter((r) => r.success);
+  const latencies = successful.map((r) => r.connectionTimeMs);
+  const totalRetries = connectionResults.reduce((sum, r) => sum + r.retryCount, 0);
+
+  // Print errors if any
+  if (failed.length > 0) {
+    const errorCounts = new Map<string, number>();
+    for (const f of failed) {
+      const err = f.error ?? "Unknown error";
+      errorCounts.set(err, (errorCounts.get(err) ?? 0) + 1);
+    }
+    console.log(chalk.red("Errors:"));
+    for (const [err, count] of errorCounts) {
+      console.log(chalk.red(` ${count}x: ${err}`));
+    }
+    console.log("");
+  }
+
+  // Disconnect all clients
+  console.log(`${chalk.cyan("[connection-storm]")} Disconnecting clients...`);
+  for (const client of clients) {
+    client.disconnect();
+  }
+
+  return {
+    connections: {
+      attempted: connections,
+      successful: successful.length,
+      failed: failed.length,
+      immediate: immediate.length,
+      recovered: recovered.length,
+    },
+    timing: {
+      totalTimeMs: totalTime,
+      connectionLatencies: latencies,
+    },
+    retries: {
+      totalRetries,
+    },
+  };
+}
diff --git a/apps/load-tests/src/scenarios/index.ts b/apps/load-tests/src/scenarios/index.ts
new file mode 100644
index 0000000..73c2f63
--- /dev/null
+++ b/apps/load-tests/src/scenarios/index.ts
@@ -0,0 +1,31 @@
+import { runConnectionStorm } from "./connection-storm.js";
+import { runSteadyState } from "./steady-state.js";
+import type { ScenarioName, ScenarioOptions, ScenarioResult } from "./types.js";
+
+export type { ScenarioName, ScenarioOptions, ScenarioResult };
+
+/**
+ * Run a scenario by name.
+ * This is the main entry point for executing load test scenarios.
+ */
+export async function runScenario(
+  name: ScenarioName,
+  options: ScenarioOptions,
+): Promise<ScenarioResult> {
+  switch (name) {
+    case "connection-storm":
+      return runConnectionStorm(options);
+    case "steady-state":
+      return runSteadyState(options);
+    default:
+      throw new Error(`Unknown scenario: ${name as string}`);
+  }
+}
+
+/**
+ * Check if a string is a valid scenario name.
+ */
+export function isValidScenarioName(name: string): name is ScenarioName {
+  return name === "connection-storm" || name === "steady-state";
+}
+
diff --git a/apps/load-tests/src/scenarios/steady-state.ts b/apps/load-tests/src/scenarios/steady-state.ts
new file mode 100644
index 0000000..aed7bae
--- /dev/null
+++ b/apps/load-tests/src/scenarios/steady-state.ts
@@ -0,0 +1,200 @@
+import chalk from "chalk";
+import {
+  CentrifugeClient,
+  type ConnectionResult,
+} from "../client/centrifuge-client.js";
+import {
+  createConnectionProgressBar,
+  startProgressBar,
+  stopProgressBar,
+  updateProgressBar,
+} from "../utils/progress.js";
+import { sleep } from "../utils/timing.js";
+import type { ScenarioOptions, ScenarioResult } from "./types.js";
+
+/**
+ * Steady state scenario:
+ * 1. Ramp up connections over rampUpSec (in parallel with proper pacing)
+ * 2. Hold connections for durationSec
+ * 3. Track disconnects during hold
+ * 4. Disconnect all at end
+ */
+export async function runSteadyState(
+  options: ScenarioOptions,
+): Promise<ScenarioResult> {
+  const { target, connections, durationSec, rampUpSec } = options;
+
+  console.log(
+    `${chalk.cyan("[steady-state]")} Ramping up to ${chalk.bold(connections)} connections over ${rampUpSec}s...`,
+  );
+  console.log("");
+
+  const clients: CentrifugeClient[] = [];
+  const connectionResults: ConnectionResult[] = [];
+  let peakDisconnects = 0;
+  let reconnectsDuringHold = 0;
+  let previousDisconnectCount = 0;
+
+  const rampUpStart = performance.now();
+  const connectionDelay = (rampUpSec * 1000) / connections;
+
+  // Create progress bar
+  const progressBar = createConnectionProgressBar("[steady-state]");
+  startProgressBar(progressBar, connections);
+
+  // Ramp up phase - fire connections in parallel with pacing
+  const connectPromises: Promise<void>[] = [];
+
+  for (let i = 0; i < connections; i++) {
+    const client = new CentrifugeClient({ url: target });
+    clients.push(client);
+
+    // Fire connection (don't await - let it run in
parallel) + const connectPromise = client.connect().then((result) => { + connectionResults.push(result); + const immediate = connectionResults.filter((r) => r.outcome === "immediate").length; + const recovered = connectionResults.filter((r) => r.outcome === "recovered").length; + const failed = connectionResults.filter((r) => r.outcome === "failed").length; + updateProgressBar(progressBar, connectionResults.length, { immediate, recovered, failed }); + }); + connectPromises.push(connectPromise); + + // Pace the connection starts (but don't wait for connection to complete) + if (i < connections - 1 && connectionDelay > 0) { + await sleep(connectionDelay); + } + } + + // Wait for all connections to complete + await Promise.all(connectPromises); + stopProgressBar(progressBar); + + const rampUpTime = performance.now() - rampUpStart; + const successfulConnections = connectionResults.filter((r) => r.success).length; + + console.log(""); + console.log( + `${chalk.cyan("[steady-state]")} Ramp complete: ${chalk.green(successfulConnections)}/${connections} connected in ${Math.round(rampUpTime)}ms`, + ); + + if (successfulConnections === 0) { + console.log(chalk.red("[steady-state] No successful connections, skipping hold phase")); + return buildResult(connectionResults, connections, rampUpTime, 0, 0, 0, 0); + } + + // Hold phase - keep connections open and monitor + console.log(`${chalk.cyan("[steady-state]")} Holding for ${chalk.bold(durationSec)}s...`); + + const holdStart = performance.now(); + const holdEndTime = holdStart + durationSec * 1000; + let lastLogTime = holdStart; + const logInterval = 5000; // Log every 5 seconds + + while (performance.now() < holdEndTime) { + // Check for disconnects + const currentActive = clients.filter((c) => c.isConnected()).length; + const currentDisconnectCount = successfulConnections - currentActive; + + // Track peak disconnects (high water mark) + if (currentDisconnectCount > peakDisconnects) { + peakDisconnects = 
currentDisconnectCount; + } + + // Track reconnections: if disconnect count decreased, clients reconnected + if (currentDisconnectCount < previousDisconnectCount) { + reconnectsDuringHold += previousDisconnectCount - currentDisconnectCount; + } + previousDisconnectCount = currentDisconnectCount; + + // Log status periodically + if (performance.now() - lastLogTime >= logInterval) { + const elapsed = Math.round((performance.now() - holdStart) / 1000); + const activeColor = currentActive === successfulConnections ? chalk.green : chalk.yellow; + const disconnectColor = currentDisconnectCount === 0 ? chalk.green : chalk.red; + console.log( + `${chalk.cyan("[steady-state]")} ${chalk.dim(`[${elapsed}s]`)} Active: ${activeColor(currentActive)}/${successfulConnections} | Disconnected: ${disconnectColor(currentDisconnectCount)} (peak: ${peakDisconnects}) | Reconnects: ${reconnectsDuringHold}`, + ); + lastLogTime = performance.now(); + } + + await sleep(100); // Check every 100ms + } + + const holdDuration = performance.now() - holdStart; + + // Final check + const finalActive = clients.filter((c) => c.isConnected()).length; + const finalDisconnects = successfulConnections - finalActive; + + const activeColor = finalActive === successfulConnections ? chalk.green : chalk.yellow; + const disconnectColor = finalDisconnects === 0 ? chalk.green : chalk.red; + console.log( + `${chalk.cyan("[steady-state]")} Hold complete: ${activeColor(finalActive)}/${successfulConnections} active | Final disconnects: ${disconnectColor(finalDisconnects)} | Peak: ${peakDisconnects} | Reconnects: ${reconnectsDuringHold}`, + ); + + // Disconnect all clients + console.log(`${chalk.cyan("[steady-state]")} Disconnecting clients...`); + for (const client of clients) { + client.disconnect(); + } + + // Connection stability = percentage that stayed connected the whole time + const connectionStability = + successfulConnections > 0 + ? 
((successfulConnections - finalDisconnects) / successfulConnections) * 100 + : 0; + + return buildResult( + connectionResults, + connections, + rampUpTime, + holdDuration, + finalDisconnects, + peakDisconnects, + reconnectsDuringHold, + connectionStability, + ); +} + +function buildResult( + connectionResults: ConnectionResult[], + totalConnections: number, + rampUpTimeMs: number, + holdDurationMs: number, + currentDisconnects: number, + peakDisconnects: number, + reconnectsDuringHold: number, + connectionStability = 0, +): ScenarioResult { + const immediate = connectionResults.filter((r) => r.outcome === "immediate"); + const recovered = connectionResults.filter((r) => r.outcome === "recovered"); + const failed = connectionResults.filter((r) => r.outcome === "failed"); + const successful = connectionResults.filter((r) => r.success); + const latencies = successful.map((r) => r.connectionTimeMs); + const totalRetries = connectionResults.reduce((sum, r) => sum + r.retryCount, 0); + + return { + connections: { + attempted: totalConnections, + successful: successful.length, + failed: failed.length, + immediate: immediate.length, + recovered: recovered.length, + }, + timing: { + totalTimeMs: rampUpTimeMs + holdDurationMs, + connectionLatencies: latencies, + }, + retries: { + totalRetries, + }, + steadyState: { + rampUpTimeMs, + holdDurationMs, + currentDisconnects, + peakDisconnects, + reconnectsDuringHold, + connectionStability, + }, + }; +} diff --git a/apps/load-tests/src/scenarios/types.ts b/apps/load-tests/src/scenarios/types.ts new file mode 100644 index 0000000..93e9e11 --- /dev/null +++ b/apps/load-tests/src/scenarios/types.ts @@ -0,0 +1,50 @@ +/** + * Parsed options for running a scenario. + * These are already parsed (numbers, not strings) - CLI parsing happens in cli/run.ts + */ +export interface ScenarioOptions { + target: string; + connections: number; + durationSec: number; + rampUpSec: number; +} + +/** + * Common result type returned by all scenarios. 
+ * This is the "raw" result - the CLI wraps this in TestResults for output. + */ +export interface ScenarioResult { + /** Connection metrics */ + connections: { + attempted: number; + successful: number; + failed: number; + immediate: number; + recovered: number; + }; + + /** Timing metrics */ + timing: { + totalTimeMs: number; + /** Raw latencies for percentile calculation */ + connectionLatencies: number[]; + }; + + /** Retry metrics */ + retries: { + totalRetries: number; + }; + + /** Steady-state specific metrics (only present for steady-state scenario) */ + steadyState?: { + rampUpTimeMs: number; + holdDurationMs: number; + currentDisconnects: number; + peakDisconnects: number; + reconnectsDuringHold: number; + connectionStability: number; + }; +} + +export type ScenarioName = "connection-storm" | "steady-state"; + diff --git a/apps/load-tests/src/utils/metadata.ts b/apps/load-tests/src/utils/metadata.ts new file mode 100644 index 0000000..c0341aa --- /dev/null +++ b/apps/load-tests/src/utils/metadata.ts @@ -0,0 +1,158 @@ +import * as fs from "node:fs"; +import * as path from "node:path"; + +/** + * Detect the type of runner environment. + * Uses only file system checks to avoid hanging on execSync calls. + */ +export function detectRunnerType(): "local" | "docker" | "aws" { + // Check for AWS ECS environment + if (process.env.ECS_CONTAINER_METADATA_URI || process.env.AWS_EXECUTION_ENV) { + return "aws"; + } + + // Check for /.dockerenv file (works on all platforms) + try { + if (fs.existsSync("/.dockerenv")) { + return "docker"; + } + } catch { + // Ignore errors + } + + // Skip cgroup check on macOS/Windows to avoid hanging + // Docker detection via /.dockerenv is sufficient for most cases + return "local"; +} + +/** + * Get container or task ID if running in containerized environment. 
+ */ +export function getContainerId(): string | undefined { + // AWS ECS task ID + if (process.env.ECS_CONTAINER_METADATA_URI) { + // Extract task ID from metadata URI or use environment variable + return ( + process.env.ECS_TASK_ID || + process.env.ECS_CONTAINER_METADATA_URI.split("/").pop() + ); + } + + // Docker container ID from hostname (common in Docker containers) + // This is safer than reading /proc/self/cgroup which can hang + if (process.env.HOSTNAME) { + // Docker often sets HOSTNAME to container ID + const hostname = process.env.HOSTNAME; + // Check if it looks like a container ID (12+ hex characters) + if (/^[0-9a-f]{12,}$/i.test(hostname)) { + return hostname.substring(0, 12); + } + } + + return undefined; +} + +/** + * Find the git root directory by walking up the directory tree. + */ +function findGitRoot(startDir: string): string | undefined { + let current = path.resolve(startDir); + const root = path.parse(current).root; + + while (current !== root) { + const gitDir = path.join(current, ".git"); + if (fs.existsSync(gitDir)) { + return gitDir; + } + current = path.dirname(current); + } + + return undefined; +} + +/** + * Get git commit SHA if available. + * Reads directly from .git/HEAD and .git/refs to avoid hanging on execSync. 
+ */ +export function getGitSha(): string | undefined { + try { + // Find git root by walking up from current working directory + const cwd = process.cwd(); + const gitDir = findGitRoot(cwd); + + if (!gitDir) { + return undefined; + } + + const headPath = path.join(gitDir, "HEAD"); + if (!fs.existsSync(headPath)) { + return undefined; + } + + const headContent = fs.readFileSync(headPath, "utf-8").trim(); + + // If HEAD points to a branch, resolve it + if (headContent.startsWith("ref: ")) { + const refPath = headContent.substring(5); + const fullRefPath = path.join(gitDir, refPath); + if (fs.existsSync(fullRefPath)) { + return fs.readFileSync(fullRefPath, "utf-8").trim(); + } + } else { + // Direct SHA reference + return headContent; + } + } catch { + // Not a git repo or file read failed + return undefined; + } + + return undefined; +} + +/** + * Collect all metadata for test results. + */ +export interface TestMetadata { + environment?: string; + gitSha?: string; + runnerType: string; + containerId?: string; +} + +/** + * Collect metadata for test results. + * All operations are synchronous file system checks - no execSync calls. 
+ */ +export function collectMetadata(environment?: string): TestMetadata { + // Wrap all metadata collection in try-catch to prevent any failures from blocking + let gitSha: string | undefined; + let runnerType: "local" | "docker" | "aws" = "local"; + let containerId: string | undefined; + + try { + gitSha = getGitSha(); + } catch { + // Ignore errors - git SHA is optional + } + + try { + runnerType = detectRunnerType(); + } catch { + // Fallback to local if detection fails + runnerType = "local"; + } + + try { + containerId = getContainerId(); + } catch { + // Ignore errors - container ID is optional + } + + return { + environment, + gitSha, + runnerType, + containerId, + }; +} diff --git a/apps/load-tests/src/utils/progress.ts b/apps/load-tests/src/utils/progress.ts new file mode 100644 index 0000000..1d4ca49 --- /dev/null +++ b/apps/load-tests/src/utils/progress.ts @@ -0,0 +1,54 @@ +import chalk from "chalk"; +import cliProgress from "cli-progress"; + +export interface ConnectionProgress { + immediate: number; + recovered: number; + failed: number; +} + +/** + * Create a progress bar for connection tracking. + */ +export function createConnectionProgressBar(label: string): cliProgress.SingleBar { + return new cliProgress.SingleBar( + { + format: `${chalk.cyan(label)} ${chalk.gray("|")} {bar} ${chalk.gray("|")} {value}/{total} (${chalk.green("✓")} {immediate} ${chalk.yellow("↻")} {recovered} ${chalk.red("✗")} {failed})`, + barCompleteChar: "█", + barIncompleteChar: "░", + hideCursor: true, + clearOnComplete: false, + stopOnComplete: true, + }, + cliProgress.Presets.shades_classic, + ); +} + +/** + * Start a connection progress bar. + */ +export function startProgressBar( + bar: cliProgress.SingleBar, + total: number, +): void { + bar.start(total, 0, { immediate: 0, recovered: 0, failed: 0 }); +} + +/** + * Update a connection progress bar. 
+ */ +export function updateProgressBar( + bar: cliProgress.SingleBar, + current: number, + progress: ConnectionProgress, +): void { + bar.update(current, progress); +} + +/** + * Stop a progress bar. + */ +export function stopProgressBar(bar: cliProgress.SingleBar): void { + bar.stop(); +} + diff --git a/apps/load-tests/src/utils/stats.ts b/apps/load-tests/src/utils/stats.ts new file mode 100644 index 0000000..0dca8dd --- /dev/null +++ b/apps/load-tests/src/utils/stats.ts @@ -0,0 +1,25 @@ +/** + * Latency statistics for a set of measurements. + */ +export interface LatencyStats { + min: number; + max: number; + avg: number; + p95: number; +} + +/** + * Calculate latency statistics from an array of latency measurements. + * Returns null if the array is empty. + */ +export function calculateLatencyStats(latencies: number[]): LatencyStats | null { + if (latencies.length === 0) return null; + const sorted = [...latencies].sort((a, b) => a - b); + return { + min: Math.round(Math.min(...sorted)), + max: Math.round(Math.max(...sorted)), + avg: Math.round(sorted.reduce((a, b) => a + b, 0) / sorted.length), + p95: Math.round(sorted[Math.floor((sorted.length - 1) * 0.95)] ?? 0), + }; +} + diff --git a/apps/load-tests/src/utils/timing.ts b/apps/load-tests/src/utils/timing.ts new file mode 100644 index 0000000..ed06d63 --- /dev/null +++ b/apps/load-tests/src/utils/timing.ts @@ -0,0 +1,7 @@ +/** + * Sleep for the specified number of milliseconds. 
+ */ +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + diff --git a/apps/load-tests/tsconfig.json b/apps/load-tests/tsconfig.json new file mode 100644 index 0000000..877518b --- /dev/null +++ b/apps/load-tests/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.base.json", + "include": ["src/**/*.ts"], + "compilerOptions": { + "noEmit": true + } +} diff --git a/package.json b/package.json index e9bed1d..56567d5 100644 --- a/package.json +++ b/package.json @@ -45,7 +45,8 @@ "allowScripts": { "@lavamoat/preinstall-always-fail": false, "simple-git-hooks": false, - "tsup>esbuild": false + "tsup>esbuild": false, + "tsup>postcss-load-config>tsx>esbuild": false } }, "dependencies": { diff --git a/yarn.lock b/yarn.lock index 914fd93..f7a36d0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1301,6 +1301,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/aix-ppc64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/aix-ppc64@npm:0.27.2" + conditions: os=aix & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/android-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/android-arm64@npm:0.25.8" @@ -1308,6 +1315,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/android-arm64@npm:0.27.2" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/android-arm@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/android-arm@npm:0.25.8" @@ -1315,6 +1329,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/android-arm@npm:0.27.2" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + "@esbuild/android-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/android-x64@npm:0.25.8" @@ -1322,6 +1343,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-x64@npm:0.27.2": + 
version: 0.27.2 + resolution: "@esbuild/android-x64@npm:0.27.2" + conditions: os=android & cpu=x64 + languageName: node + linkType: hard + "@esbuild/darwin-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/darwin-arm64@npm:0.25.8" @@ -1329,6 +1357,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/darwin-arm64@npm:0.27.2" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/darwin-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/darwin-x64@npm:0.25.8" @@ -1336,6 +1371,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/darwin-x64@npm:0.27.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + "@esbuild/freebsd-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/freebsd-arm64@npm:0.25.8" @@ -1343,6 +1385,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/freebsd-arm64@npm:0.27.2" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/freebsd-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/freebsd-x64@npm:0.25.8" @@ -1350,6 +1399,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/freebsd-x64@npm:0.27.2" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/linux-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-arm64@npm:0.25.8" @@ -1357,6 +1413,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-arm64@npm:0.27.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/linux-arm@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-arm@npm:0.25.8" @@ -1364,6 +1427,13 @@ __metadata: 
languageName: node linkType: hard +"@esbuild/linux-arm@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-arm@npm:0.27.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + "@esbuild/linux-ia32@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-ia32@npm:0.25.8" @@ -1371,6 +1441,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ia32@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-ia32@npm:0.27.2" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/linux-loong64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-loong64@npm:0.25.8" @@ -1378,6 +1455,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-loong64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-loong64@npm:0.27.2" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + "@esbuild/linux-mips64el@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-mips64el@npm:0.25.8" @@ -1385,6 +1469,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-mips64el@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-mips64el@npm:0.27.2" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + "@esbuild/linux-ppc64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-ppc64@npm:0.25.8" @@ -1392,6 +1483,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ppc64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-ppc64@npm:0.27.2" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/linux-riscv64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-riscv64@npm:0.25.8" @@ -1399,6 +1497,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-riscv64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-riscv64@npm:0.27.2" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + "@esbuild/linux-s390x@npm:0.25.8": version: 
0.25.8 resolution: "@esbuild/linux-s390x@npm:0.25.8" @@ -1406,6 +1511,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-s390x@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-s390x@npm:0.27.2" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + "@esbuild/linux-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/linux-x64@npm:0.25.8" @@ -1413,6 +1525,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/linux-x64@npm:0.27.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + "@esbuild/netbsd-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/netbsd-arm64@npm:0.25.8" @@ -1420,6 +1539,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/netbsd-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/netbsd-arm64@npm:0.27.2" + conditions: os=netbsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/netbsd-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/netbsd-x64@npm:0.25.8" @@ -1427,6 +1553,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/netbsd-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/netbsd-x64@npm:0.27.2" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/openbsd-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/openbsd-arm64@npm:0.25.8" @@ -1434,6 +1567,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/openbsd-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/openbsd-arm64@npm:0.27.2" + conditions: os=openbsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/openbsd-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/openbsd-x64@npm:0.25.8" @@ -1441,6 +1581,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/openbsd-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/openbsd-x64@npm:0.27.2" + conditions: os=openbsd & cpu=x64 + languageName: node + 
linkType: hard + "@esbuild/openharmony-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/openharmony-arm64@npm:0.25.8" @@ -1448,6 +1595,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/openharmony-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/openharmony-arm64@npm:0.27.2" + conditions: os=openharmony & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/sunos-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/sunos-x64@npm:0.25.8" @@ -1455,6 +1609,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/sunos-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/sunos-x64@npm:0.27.2" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + "@esbuild/win32-arm64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/win32-arm64@npm:0.25.8" @@ -1462,6 +1623,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-arm64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/win32-arm64@npm:0.27.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/win32-ia32@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/win32-ia32@npm:0.25.8" @@ -1469,6 +1637,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-ia32@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/win32-ia32@npm:0.27.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/win32-x64@npm:0.25.8": version: 0.25.8 resolution: "@esbuild/win32-x64@npm:0.25.8" @@ -1476,6 +1651,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-x64@npm:0.27.2": + version: 0.27.2 + resolution: "@esbuild/win32-x64@npm:0.27.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@eslint-community/eslint-utils@npm:^4.7.0, @eslint-community/eslint-utils@npm:^4.8.0": version: 4.9.0 resolution: "@eslint-community/eslint-utils@npm:4.9.0" @@ -2624,6 +2806,26 @@ __metadata: languageName: unknown linkType: soft 
+"@metamask/mobile-wallet-protocol-load-tests@workspace:apps/load-tests": + version: 0.0.0-use.local + resolution: "@metamask/mobile-wallet-protocol-load-tests@workspace:apps/load-tests" + dependencies: + "@types/cli-progress": ^3.11.6 + "@types/node": ^24.0.3 + "@types/ssh2": ^1.15.4 + "@types/ws": ^8.18.1 + centrifuge: ^5.3.5 + chalk: ^5.6.2 + cli-progress: ^3.12.0 + commander: ^13.1.0 + dotenv: ^16.5.0 + ssh2: ^1.16.0 + tsx: ^4.20.3 + typescript: ^5.8.3 + ws: ^8.18.3 + languageName: unknown + linkType: soft + "@metamask/mobile-wallet-protocol-wallet-client@workspace:^, @metamask/mobile-wallet-protocol-wallet-client@workspace:packages/wallet-client": version: 0.0.0-use.local resolution: "@metamask/mobile-wallet-protocol-wallet-client@workspace:packages/wallet-client" @@ -4190,6 +4392,15 @@ __metadata: languageName: node linkType: hard +"@types/cli-progress@npm:^3.11.6": + version: 3.11.6 + resolution: "@types/cli-progress@npm:3.11.6" + dependencies: + "@types/node": "*" + checksum: 2df9d4788089564c8eb01e6d05b084bd030b7ce3f1a3698c57a998f2b329c5c7a3ea2d20e3756579a385945c70875df3c798b7740f6bf679eb1b1937e91f5eca + languageName: node + linkType: hard + "@types/debug@npm:^4.1.7": version: 4.1.12 resolution: "@types/debug@npm:4.1.12" @@ -4284,6 +4495,15 @@ __metadata: languageName: node linkType: hard +"@types/node@npm:^18.11.18": + version: 18.19.130 + resolution: "@types/node@npm:18.19.130" + dependencies: + undici-types: ~5.26.4 + checksum: b7032363581c416e721a88cffdc2b47662337cacd20f8294f5619a1abf79615c7fef1521964c2aa9d36ed6aae733e1a03e8c704661bd5a0c2f34b390f41ea395 + languageName: node + linkType: hard + "@types/node@npm:^20": version: 20.19.23 resolution: "@types/node@npm:20.19.23" @@ -4350,6 +4570,15 @@ __metadata: languageName: node linkType: hard +"@types/ssh2@npm:^1.15.4": + version: 1.15.5 + resolution: "@types/ssh2@npm:1.15.5" + dependencies: + "@types/node": ^18.11.18 + checksum: 
158ce6644f6784b1f53d93f39d7b97291f97a45e756af6fd4e2d8b0f72800248137826e03b3218caadda5d769b882a06f2ab0981d57a55632658c54898fafc4a + languageName: node + linkType: hard + "@types/stack-utils@npm:^2.0.0": version: 2.0.3 resolution: "@types/stack-utils@npm:2.0.3" @@ -5133,6 +5362,15 @@ __metadata: languageName: node linkType: hard +"asn1@npm:^0.2.6": + version: 0.2.6 + resolution: "asn1@npm:0.2.6" + dependencies: + safer-buffer: ~2.1.0 + checksum: 39f2ae343b03c15ad4f238ba561e626602a3de8d94ae536c46a4a93e69578826305366dc09fbb9b56aec39b4982a463682f259c38e59f6fa380cd72cd61e493d + languageName: node + linkType: hard + "assertion-error@npm:^2.0.1": version: 2.0.1 resolution: "assertion-error@npm:2.0.1" @@ -5431,6 +5669,15 @@ __metadata: languageName: node linkType: hard +"bcrypt-pbkdf@npm:^1.0.2": + version: 1.0.2 + resolution: "bcrypt-pbkdf@npm:1.0.2" + dependencies: + tweetnacl: ^0.14.3 + checksum: 4edfc9fe7d07019609ccf797a2af28351736e9d012c8402a07120c4453a3b789a15f2ee1530dc49eee8f7eb9379331a8dd4b3766042b9e502f74a68e7f662291 + languageName: node + linkType: hard + "better-opn@npm:~3.0.2": version: 3.0.2 resolution: "better-opn@npm:3.0.2" @@ -5585,6 +5832,13 @@ __metadata: languageName: node linkType: hard +"buildcheck@npm:~0.0.6": + version: 0.0.7 + resolution: "buildcheck@npm:0.0.7" + checksum: 18bc4581525776dc7486906241723a0b2bc6d9d55bdbf8aa3ac225ed02c9dfc01be06020a5cce58b1630edd8a1ba1ce3fc51959bbbafaabcef05f9e7707210de + languageName: node + linkType: hard + "bundle-name@npm:^4.1.0": version: 4.1.0 resolution: "bundle-name@npm:4.1.0" @@ -5763,6 +6017,13 @@ __metadata: languageName: node linkType: hard +"chalk@npm:^5.6.2": + version: 5.6.2 + resolution: "chalk@npm:5.6.2" + checksum: 4ee2d47a626d79ca27cb5299ecdcce840ef5755e287412536522344db0fc51ca0f6d6433202332c29e2288c6a90a2b31f3bd626bc8c14743b6b6ee28abd3b796 + languageName: node + linkType: hard + "check-error@npm:^2.1.1": version: 2.1.1 resolution: "check-error@npm:2.1.1" @@ -5851,6 +6112,15 @@ __metadata: 
languageName: node linkType: hard +"cli-progress@npm:^3.12.0": + version: 3.12.0 + resolution: "cli-progress@npm:3.12.0" + dependencies: + string-width: ^4.2.3 + checksum: e8390dc3cdf3c72ecfda0a1e8997bfed63a0d837f97366bbce0ca2ff1b452da386caed007b389f0fe972625037b6c8e7ab087c69d6184cc4dfc8595c4c1d3e6e + languageName: node + linkType: hard + "cli-spinners@npm:^2.0.0": version: 2.9.2 resolution: "cli-spinners@npm:2.9.2" @@ -5969,6 +6239,13 @@ __metadata: languageName: node linkType: hard +"commander@npm:^13.1.0": + version: 13.1.0 + resolution: "commander@npm:13.1.0" + checksum: 8ca2fcb33caf2aa06fba3722d7a9440921331d54019dabf906f3603313e7bf334b009b862257b44083ff65d5a3ab19e83ad73af282bd5319f01dc228bdf87ef0 + languageName: node + linkType: hard + "commander@npm:^2.20.0": version: 2.20.3 resolution: "commander@npm:2.20.3" @@ -6093,6 +6370,17 @@ __metadata: languageName: node linkType: hard +"cpu-features@npm:~0.0.10": + version: 0.0.10 + resolution: "cpu-features@npm:0.0.10" + dependencies: + buildcheck: ~0.0.6 + nan: ^2.19.0 + node-gyp: latest + checksum: ab17e25cea0b642bdcfd163d3d872be4cc7d821e854d41048557799e990d672ee1cc7bd1d4e7c4de0309b1683d4c001d36ba8569b5035d1e7e2ff2d681f681d7 + languageName: node + linkType: hard + "crc-32@npm:^1.2.0": version: 1.2.2 resolution: "crc-32@npm:1.2.2" @@ -6398,7 +6686,7 @@ __metadata: languageName: node linkType: hard -"dotenv@npm:^16.4.5": +"dotenv@npm:^16.4.5, dotenv@npm:^16.5.0": version: 16.6.1 resolution: "dotenv@npm:16.6.1" checksum: e8bd63c9a37f57934f7938a9cf35de698097fadf980cb6edb61d33b3e424ceccfe4d10f37130b904a973b9038627c2646a3365a904b4406514ea94d7f1816b69 @@ -6770,6 +7058,95 @@ __metadata: languageName: node linkType: hard +"esbuild@npm:~0.27.0": + version: 0.27.2 + resolution: "esbuild@npm:0.27.2" + dependencies: + "@esbuild/aix-ppc64": 0.27.2 + "@esbuild/android-arm": 0.27.2 + "@esbuild/android-arm64": 0.27.2 + "@esbuild/android-x64": 0.27.2 + "@esbuild/darwin-arm64": 0.27.2 + "@esbuild/darwin-x64": 0.27.2 + 
"@esbuild/freebsd-arm64": 0.27.2 + "@esbuild/freebsd-x64": 0.27.2 + "@esbuild/linux-arm": 0.27.2 + "@esbuild/linux-arm64": 0.27.2 + "@esbuild/linux-ia32": 0.27.2 + "@esbuild/linux-loong64": 0.27.2 + "@esbuild/linux-mips64el": 0.27.2 + "@esbuild/linux-ppc64": 0.27.2 + "@esbuild/linux-riscv64": 0.27.2 + "@esbuild/linux-s390x": 0.27.2 + "@esbuild/linux-x64": 0.27.2 + "@esbuild/netbsd-arm64": 0.27.2 + "@esbuild/netbsd-x64": 0.27.2 + "@esbuild/openbsd-arm64": 0.27.2 + "@esbuild/openbsd-x64": 0.27.2 + "@esbuild/openharmony-arm64": 0.27.2 + "@esbuild/sunos-x64": 0.27.2 + "@esbuild/win32-arm64": 0.27.2 + "@esbuild/win32-ia32": 0.27.2 + "@esbuild/win32-x64": 0.27.2 + dependenciesMeta: + "@esbuild/aix-ppc64": + optional: true + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-arm64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-arm64": + optional: true + "@esbuild/openbsd-x64": + optional: true + "@esbuild/openharmony-arm64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: 62ec92f8f40ad19922ae7d8dbf0427e41744120a77cc95abdf099dfb484d65fbe3c70cc55b8eccb7f6cb0d14e871ff1f2f76376d476915c2a6d2b800269261b2 + languageName: 
node + linkType: hard + "escalade@npm:^3.1.1, escalade@npm:^3.2.0": version: 3.2.0 resolution: "escalade@npm:3.2.0" @@ -8065,7 +8442,7 @@ __metadata: languageName: node linkType: hard -"get-tsconfig@npm:^4.10.0": +"get-tsconfig@npm:^4.10.0, get-tsconfig@npm:^4.7.5": version: 4.13.0 resolution: "get-tsconfig@npm:4.13.0" dependencies: @@ -10388,6 +10765,15 @@ __metadata: languageName: node linkType: hard +"nan@npm:^2.19.0, nan@npm:^2.23.0": + version: 2.24.0 + resolution: "nan@npm:2.24.0" + dependencies: + node-gyp: latest + checksum: ab4080188a2fe2bef0a1f3ce5c65a6c3d71fa23be08f4e0696dc256c5030c809d11569d5bcf28810148a7b0029c195c592b98b7b22c5e9e7e9aa0e71905a63b8 + languageName: node + linkType: hard + "nanoid@npm:^3.3.11, nanoid@npm:^3.3.6, nanoid@npm:^3.3.7, nanoid@npm:^3.3.8": version: 3.3.11 resolution: "nanoid@npm:3.3.11" @@ -12244,7 +12630,7 @@ __metadata: languageName: node linkType: hard -"safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0": +"safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0, safer-buffer@npm:~2.1.0": version: 2.1.2 resolution: "safer-buffer@npm:2.1.2" checksum: cab8f25ae6f1434abee8d80023d7e72b598cf1327164ddab31003c51215526801e40b66c5e65d658a0af1e9d6478cadcb4c745f4bd6751f97d8644786c0978b0 @@ -12768,6 +13154,23 @@ __metadata: languageName: node linkType: hard +"ssh2@npm:^1.16.0": + version: 1.17.0 + resolution: "ssh2@npm:1.17.0" + dependencies: + asn1: ^0.2.6 + bcrypt-pbkdf: ^1.0.2 + cpu-features: ~0.0.10 + nan: ^2.23.0 + dependenciesMeta: + cpu-features: + optional: true + nan: + optional: true + checksum: 1661b020e367e358603187a1efbb7628cb9b2f75543f60e354ede67be1216d331f2b99a73c57fb01a04be050a1e06fc97d04760d1396ea658ca816ddf80df9a9 + languageName: node + linkType: hard + "ssri@npm:^10.0.0": version: 10.0.6 resolution: "ssri@npm:10.0.6" @@ -13444,6 +13847,29 @@ __metadata: languageName: node linkType: hard +"tsx@npm:^4.20.3": + version: 4.21.0 + resolution: "tsx@npm:4.21.0" + dependencies: + esbuild: ~0.27.0 + 
fsevents: ~2.3.3 + get-tsconfig: ^4.7.5 + dependenciesMeta: + fsevents: + optional: true + bin: + tsx: dist/cli.mjs + checksum: 50c98e4b6e66d1c30f72925c8e5e7be1a02377574de7cd367d7e7a6d4af43ca8ff659f91c654e7628b25a5498015e32f090529b92c679b0342811e1cf682e8cf + languageName: node + linkType: hard + +"tweetnacl@npm:^0.14.3": + version: 0.14.5 + resolution: "tweetnacl@npm:0.14.5" + checksum: 6061daba1724f59473d99a7bb82e13f211cdf6e31315510ae9656fefd4779851cb927adad90f3b488c8ed77c106adc0421ea8055f6f976ff21b27c5c4e918487 + languageName: node + linkType: hard + "type-check@npm:^0.4.0, type-check@npm:~0.4.0": version: 0.4.0 resolution: "type-check@npm:0.4.0" @@ -13612,6 +14038,13 @@ __metadata: languageName: node linkType: hard +"undici-types@npm:~5.26.4": + version: 5.26.5 + resolution: "undici-types@npm:5.26.5" + checksum: 3192ef6f3fd5df652f2dc1cd782b49d6ff14dc98e5dced492aa8a8c65425227da5da6aafe22523c67f035a272c599bb89cfe803c1db6311e44bed3042fc25487 + languageName: node + linkType: hard + "undici-types@npm:~6.21.0": version: 6.21.0 resolution: "undici-types@npm:6.21.0"