diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index fe8ee30..f7eb25b 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -1,5 +1,10 @@ name: E2E Tests +# Flaky Test Detection: +# Tests use nick-fields/retry@v3 with max_attempts: 3 and timeout_minutes: 5 +# This catches transient failures while limiting false positives +# Tests that pass on retry are considered "flaky" and should be investigated + on: push: branches: [master, main] @@ -11,27 +16,20 @@ env: CARGO_TERM_COLOR: always jobs: - e2e-tests: - name: E2E Tests (${{ matrix.os }}) + # Build the debugger CLI for each platform + build: + name: Build (${{ matrix.os }}) runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, macos-latest] - include: - - os: ubuntu-latest - adapter: lldb-dap - - os: macos-latest - adapter: lldb-dap - steps: - name: Checkout repository uses: actions/checkout@v4 - name: Install Rust toolchain uses: dtolnay/rust-toolchain@stable - with: - components: rustfmt, clippy - name: Cache Cargo registry uses: actions/cache@v4 @@ -47,123 +45,640 @@ jobs: - name: Build debugger CLI run: cargo build --release + - name: Upload debugger binary + uses: actions/upload-artifact@v4 + with: + name: debugger-${{ matrix.os }} + path: target/release/debugger + retention-days: 1 + + # LLDB tests - C/C++/Rust debugging + test-lldb: + name: LLDB Tests (${{ matrix.os }}) + needs: build + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-${{ matrix.os }} + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger + - name: Install LLDB (Ubuntu) if: matrix.os == 'ubuntu-latest' run: | sudo apt-get update - sudo apt-get install -y lldb + sudo apt-get install -y lldb gcc - - name: Install Python and debugpy - uses: actions/setup-python@v5 + - name: Install build tools (macOS) + if: matrix.os == 'macos-latest' + run: brew install gcc || true + + - name: Compile C test fixtures + run: | + gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c || true + gcc -g tests/e2e/hello_world.c -o tests/e2e/test_c || true + gcc -g -pthread tests/fixtures/threaded.c -o tests/fixtures/test_threaded_c || true + + - name: Compile Rust test fixtures + run: | + rustc -g tests/fixtures/simple.rs -o tests/fixtures/test_simple_rs || true + rustc -g tests/e2e/hello_world.rs -o tests/e2e/test_rs || true + + - name: Run C Hello World Test + uses: nick-fields/retry@v3 with: - python-version: '3.11' + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/hello_world_c.yml --verbose - - name: Install debugpy - run: pip install debugpy + - name: Run Rust Hello World Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/hello_world_rust.yml --verbose + + - name: Run Complex Verification Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/complex_verification.yml --verbose + + - name: Run Conditional Breakpoint Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/conditional_breakpoint_c.yml --verbose + + - name: Run Hit 
Count Breakpoint Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/hitcount_breakpoint_c.yml --verbose + + - name: Run Breakpoint Management Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/breakpoint_management_c.yml --verbose + + - name: Run Stack Navigation Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/stack_navigation_c.yml --verbose + + - name: Run Output Capture Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/output_capture_c.yml --verbose + + - name: Run Program Restart Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/program_restart_c.yml --verbose + + - name: Run Pause Resume Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/pause_resume_c.yml --verbose + + - name: Run Thread List Test + uses: nick-fields/retry@v3 + with: + timeout_minutes: 5 + max_attempts: 3 + command: ./target/release/debugger test tests/scenarios/thread_list_c.yml --verbose + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: lldb-logs-${{ matrix.os }} + path: ~/.local/share/debugger/logs/ + + # Delve tests - Go debugging + test-delve: + name: Delve Tests (${{ matrix.os }}) + needs: build + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-${{ matrix.os }} + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger - name: Install Go uses: actions/setup-go@v5 with: - go-version: '1.21' + go-version: '1.22' - name: Install Delve run: go install github.com/go-delve/delve/cmd/dlv@latest - - name: Install GCC (Ubuntu) - if: matrix.os == 'ubuntu-latest' - run: sudo apt-get install -y gcc + - name: Compile Go test fixtures + run: | + go build -gcflags='all=-N -l' -o tests/e2e/test_go tests/e2e/hello_world.go + go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go + go build -gcflags='all=-N -l' -o tests/fixtures/test_threaded_go tests/fixtures/threaded.go + + - name: Run Go Hello World Test + run: ./target/release/debugger test tests/scenarios/hello_world_go.yml --verbose + continue-on-error: true + + - name: Run Go Complex Test + run: ./target/release/debugger test tests/scenarios/complex_go.yml --verbose + continue-on-error: true + + - name: Run Conditional Breakpoint Test + run: ./target/release/debugger test tests/scenarios/conditional_breakpoint_go.yml --verbose + continue-on-error: true + + - name: Run Hit Count Breakpoint Test + run: ./target/release/debugger test tests/scenarios/hitcount_breakpoint_go.yml --verbose + continue-on-error: true + + - name: Run Stack Navigation Test + run: ./target/release/debugger test tests/scenarios/stack_navigation_go.yml --verbose + continue-on-error: true + + - name: Run Output Capture Test + run: ./target/release/debugger test 
tests/scenarios/output_capture_go.yml --verbose + continue-on-error: true + + - name: Run Program Restart Test + run: ./target/release/debugger test tests/scenarios/program_restart_go.yml --verbose + continue-on-error: true + + - name: Run Thread List Test + run: ./target/release/debugger test tests/scenarios/thread_list_go.yml --verbose + continue-on-error: true + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: delve-logs-${{ matrix.os }} + path: ~/.local/share/debugger/logs/ + + # debugpy tests - Python debugging + test-debugpy: + name: debugpy Tests (${{ matrix.os }}) + needs: build + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-${{ matrix.os }} + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger + + - name: Install Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install debugpy + run: pip install debugpy + + - name: Run Python Hello World Test + run: ./target/release/debugger test tests/scenarios/hello_world_python.yml --verbose + continue-on-error: true + + - name: Run Conditional Breakpoint Test + run: ./target/release/debugger test tests/scenarios/conditional_breakpoint_py.yml --verbose + continue-on-error: true + + - name: Run Stack Navigation Test + run: ./target/release/debugger test tests/scenarios/stack_navigation_py.yml --verbose + continue-on-error: true + + - name: Run Output Capture Test + run: ./target/release/debugger test tests/scenarios/output_capture_py.yml --verbose + continue-on-error: true + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: debugpy-logs-${{ matrix.os }} + path: ~/.local/share/debugger/logs/ + + # js-debug tests - JavaScript/TypeScript debugging + test-js-debug: + name: js-debug Tests (${{ matrix.os }}) + needs: build + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-${{ matrix.os }} + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger + + - name: Install Node.js + uses: actions/setup-node@v4 + with: + node-version: '22' - - name: Verify debug adapters + - name: Install TypeScript and compile fixtures run: | - echo "=== Checking installed debug adapters ===" - which lldb-dap || which lldb-vscode || echo "lldb-dap not found in PATH" - python -c "import debugpy; print(f'debugpy installed at {debugpy.__file__}')" - dlv version - gcc --version - rustc --version - go version - - - name: Setup debug adapters via CLI + npm install -g typescript + cd tests/fixtures && npm install && npx tsc + + - name: Setup js-debug adapter + run: ./target/release/debugger setup js-debug || true + + - name: Check js-debug installation run: | - ./target/release/debugger setup --check || true - ./target/release/debugger setup lldb --dry-run || true + echo "=== Checking js-debug installation ===" + ls -la ~/.local/share/debugger/adapters/ 2>/dev/null || true + find 
~/.local/share/debugger/adapters/ -name "*.js" 2>/dev/null | head -5 || true - - name: Compile test fixtures + - name: Run JavaScript Hello World Test + run: ./target/release/debugger test tests/scenarios/hello_world_js.yml --verbose + continue-on-error: true + + - name: Run TypeScript Hello World Test + run: ./target/release/debugger test tests/scenarios/hello_world_ts.yml --verbose + continue-on-error: true + + - name: Run JavaScript Stepping Test + run: ./target/release/debugger test tests/scenarios/stepping_js.yml --verbose + continue-on-error: true + + - name: Run JavaScript Expression Eval Test + run: ./target/release/debugger test tests/scenarios/expression_eval_js.yml --verbose + continue-on-error: true + + - name: Run Conditional Breakpoint Test + run: ./target/release/debugger test tests/scenarios/conditional_breakpoint_js.yml --verbose + continue-on-error: true + + - name: Run Stack Navigation Test + run: ./target/release/debugger test tests/scenarios/stack_navigation_js.yml --verbose + continue-on-error: true + + - name: Run Output Capture Test + run: ./target/release/debugger test tests/scenarios/output_capture_js.yml --verbose + continue-on-error: true + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: js-debug-logs-${{ matrix.os }} + path: ~/.local/share/debugger/logs/ + + # GDB tests - C/C++ debugging with GDB (native DAP or cdt-gdb-adapter bridge) + test-gdb: + name: GDB Tests + needs: build + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-ubuntu-latest + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger + + - name: Install GDB and build tools run: | - # Compile C test files - gcc -g tests/e2e/hello_world.c -o tests/e2e/test_c || true - gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c || true - - # Compile Rust test files - rustc -g tests/e2e/hello_world.rs -o tests/e2e/test_rs || true - rustc -g tests/fixtures/simple.rs -o tests/fixtures/test_simple_rs || true - - # Compile Go test files (disable optimizations for debugging) - go build -gcflags='all=-N -l' -o tests/e2e/test_go tests/e2e/hello_world.go || true - go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go || true + sudo apt-get update + sudo apt-get install -y gdb gcc g++ - - name: Run C Hello World E2E Test + - name: Check GDB version + run: gdb --version | head -1 + + - name: Install Node.js for cdt-gdb-adapter + uses: actions/setup-node@v4 + with: + node-version: '22' + + - name: Install cdt-gdb-adapter (for GDB < 14.1) + run: npm install -g cdt-gdb-adapter || true + + - name: Compile C test fixtures run: | - ./target/release/debugger test tests/scenarios/hello_world_c.yml --verbose + gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c + gcc -g tests/e2e/hello_world.c -o tests/e2e/test_c + gcc -g -pthread tests/fixtures/threaded.c -o tests/fixtures/test_threaded_c + + - name: Run C Hello World Test with GDB + run: ./target/release/debugger test tests/scenarios/hello_world_c.yml --adapter gdb --verbose continue-on-error: true - - name: Run Rust Hello World E2E Test - run: | - ./target/release/debugger test tests/scenarios/hello_world_rust.yml --verbose + - name: Run Conditional Breakpoint Test with GDB + run: ./target/release/debugger test 
tests/scenarios/conditional_breakpoint_c.yml --adapter gdb --verbose continue-on-error: true - - name: Run Python Hello World E2E Test - run: | - ./target/release/debugger test tests/scenarios/hello_world_python.yml --verbose + - name: Run Hit Count Breakpoint Test with GDB + run: ./target/release/debugger test tests/scenarios/hitcount_breakpoint_c.yml --adapter gdb --verbose continue-on-error: true - - name: Run Go Hello World E2E Test - # Note: Delve (dlv) uses TCP-based DAP connection, which is not yet fully supported. - # These tests are expected to fail until TCP-based DAP adapter support is added. - run: | - ./target/release/debugger test tests/scenarios/hello_world_go.yml --verbose + - name: Run Breakpoint Management Test with GDB + run: ./target/release/debugger test tests/scenarios/breakpoint_management_c.yml --adapter gdb --verbose continue-on-error: true - - name: Run Go Complex E2E Test - # Note: Delve (dlv) uses TCP-based DAP connection, which is not yet fully supported. - run: | - ./target/release/debugger test tests/scenarios/complex_go.yml --verbose + - name: Run Stack Navigation Test with GDB + run: ./target/release/debugger test tests/scenarios/stack_navigation_c.yml --adapter gdb --verbose continue-on-error: true - - name: Run Complex Verification E2E Test - run: | - ./target/release/debugger test tests/scenarios/complex_verification.yml --verbose + - name: Run Output Capture Test with GDB + run: ./target/release/debugger test tests/scenarios/output_capture_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Program Restart Test with GDB + run: ./target/release/debugger test tests/scenarios/program_restart_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Pause Resume Test with GDB + run: ./target/release/debugger test tests/scenarios/pause_resume_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Thread List Test with GDB + run: ./target/release/debugger test tests/scenarios/thread_list_c.yml --adapter gdb --verbose continue-on-error: true - name: Cleanup daemon if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: gdb-logs + path: ~/.local/share/debugger/logs/ + + # GDB tests on macOS - with Homebrew installation + test-gdb-macos: + name: GDB Tests (macOS) + needs: build + runs-on: macos-latest + continue-on-error: true + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download debugger binary + uses: actions/download-artifact@v4 + with: + name: debugger-macos-latest + path: target/release/ + + - name: Make debugger executable + run: chmod +x target/release/debugger + + - name: Install GDB via Homebrew + id: install-gdb + run: | + brew install gdb || echo "gdb_install_failed=true" >> $GITHUB_OUTPUT + if command -v gdb &> /dev/null; then + echo "GDB installed successfully" + gdb --version | head -1 + echo "gdb_available=true" >> $GITHUB_OUTPUT + else + echo "GDB installation failed or not available" + echo "gdb_available=false" >> $GITHUB_OUTPUT + fi + + - name: Install Node.js and cdt-gdb-adapter + if: steps.install-gdb.outputs.gdb_available == 'true' + uses: actions/setup-node@v4 + with: + node-version: '22' + + - name: Install cdt-gdb-adapter (for GDB < 14.1) + if: steps.install-gdb.outputs.gdb_available == 'true' + run: npm install -g cdt-gdb-adapter || true + + - name: Compile C test fixtures + if: steps.install-gdb.outputs.gdb_available == 'true' run: | - # Kill any lingering daemon 
processes - pkill -f "debugger daemon" || true + gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c + gcc -g tests/e2e/hello_world.c -o tests/e2e/test_c + gcc -g -pthread tests/fixtures/threaded.c -o tests/fixtures/test_threaded_c + + - name: Run C Hello World Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/hello_world_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Conditional Breakpoint Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/conditional_breakpoint_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Hit Count Breakpoint Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/hitcount_breakpoint_c.yml --adapter gdb --verbose + continue-on-error: true - - name: Upload test logs + - name: Run Breakpoint Management Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/breakpoint_management_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Stack Navigation Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/stack_navigation_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Output Capture Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/output_capture_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Program Restart Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/program_restart_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Pause Resume Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/pause_resume_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Run Thread List Test with GDB + if: steps.install-gdb.outputs.gdb_available == 'true' + run: ./target/release/debugger test tests/scenarios/thread_list_c.yml --adapter gdb --verbose + continue-on-error: true + + - name: Skip tests if GDB unavailable + if: steps.install-gdb.outputs.gdb_available != 'true' + run: echo "Skipping GDB tests - GDB not available on macOS" + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs if: failure() uses: actions/upload-artifact@v4 with: - name: test-logs-${{ matrix.os }} + name: gdb-macos-logs + path: ~/.local/share/debugger/logs/ + + # Integration tests with Rust test framework + integration-tests: + name: Integration Tests + needs: build + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Cache Cargo registry + uses: actions/cache@v4 + with: path: | - ~/.local/share/debugger/logs/ - ~/.cache/debugger/ + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Install LLDB + run: | + sudo apt-get update + sudo apt-get install -y lldb gcc + + - name: Install Go and Delve + run: | + sudo apt-get install -y golang-go + go install github.com/go-delve/delve/cmd/dlv@latest + + - name: Install 
Python and debugpy + run: | + sudo apt-get install -y python3 python3-pip + pip3 install debugpy + + - name: Install Node.js and js-debug + uses: actions/setup-node@v4 + with: + node-version: '22' - # Summary job to report overall status + - name: Compile test fixtures + run: | + gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c || true + rustc -g tests/fixtures/simple.rs -o tests/fixtures/test_simple_rs || true + go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go || true + cd tests/fixtures && npm install && npx tsc || true + + - name: Run integration tests + run: cargo test --test integration -- --test-threads=1 + continue-on-error: true + + - name: Cleanup daemon + if: always() + run: pkill -f "debugger daemon" || true + + - name: Upload logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: integration-logs + path: ~/.local/share/debugger/logs/ + + # Summary job e2e-summary: name: E2E Test Summary - needs: e2e-tests + needs: [test-lldb, test-delve, test-debugpy, test-js-debug, test-gdb, test-gdb-macos, integration-tests] runs-on: ubuntu-latest if: always() steps: - name: Check test results run: | - if [ "${{ needs.e2e-tests.result }}" == "success" ]; then - echo "✅ All E2E tests passed!" - exit 0 - else - echo "⚠️ Some E2E tests failed or were skipped" - echo "Check individual job logs for details" - exit 0 # Don't fail the workflow while tests are being stabilized - fi + echo "## E2E Test Results Summary" + echo "" + echo "| Test Suite | Status |" + echo "|------------|--------|" + echo "| LLDB | ${{ needs.test-lldb.result }} |" + echo "| Delve | ${{ needs.test-delve.result }} |" + echo "| debugpy | ${{ needs.test-debugpy.result }} |" + echo "| js-debug | ${{ needs.test-js-debug.result }} |" + echo "| GDB | ${{ needs.test-gdb.result }} |" + echo "| GDB (macOS) | ${{ needs.test-gdb-macos.result }} |" + echo "| Integration | ${{ needs.integration-tests.result }} |" + echo "" + echo "All test suites completed." 
diff --git a/.gitignore b/.gitignore index bf790e1..2dca71b 100644 --- a/.gitignore +++ b/.gitignore @@ -23,4 +23,15 @@ target # Test binaries tests/e2e/test_* -tests/fixtures/cuda_test \ No newline at end of file +tests/fixtures/test_* +tests/fixtures/cuda_test + +# JS/TS stuff +node_modules/ +package-lock.json + +# TypeScript compiled output (for fixtures - dist is committed for test reliability) +# tests/fixtures/dist/ is intentionally committed + +# Docker build artifacts +.docker/ \ No newline at end of file diff --git a/README.md b/README.md index e0b0f9b..6ab1fe2 100644 --- a/README.md +++ b/README.md @@ -221,13 +221,13 @@ codelldb = "~/.local/share/debugger-cli/adapters/codelldb/adapter/codelldb" | Adapter | Languages | Status | |---------|-----------|--------| | lldb-dap | C, C++, Rust, Swift | ✅ Full support | -| CodeLLDB | C, C++, Rust | ✅ Full support | | debugpy | Python | ✅ Full support | | Delve | Go | ✅ Full support | | GDB | C, C++ | ✅ Full support (requires GDB 14.1+) | | CUDA-GDB | CUDA, C, C++ | ✅ Full support (Linux only) | +| js-debug | JavaScript, TypeScript | ✅ Full support | +| CodeLLDB | C, C++, Rust | 🚧 Planned | | cpptools | C, C++ | 🚧 Planned | -| js-debug | JavaScript, TypeScript | 🚧 Planned | ## Examples diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8c8ad04 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,80 @@ +# Docker Compose for local E2E testing +# Usage: +# docker compose build - Build all images +# docker compose up lldb - Run LLDB tests +# docker compose up delve - Run Delve tests +# docker compose up debugpy - Run debugpy tests +# docker compose up js-debug - Run js-debug tests +# docker compose up gdb - Run GDB tests +# docker compose up - Run all tests + +services: + # Base image - not runnable directly + base: + build: + context: . + dockerfile: docker/base/Dockerfile + image: debugger-cli:base + + # LLDB tests - C/C++/Rust + lldb: + build: + context: . + dockerfile: docker/lldb/Dockerfile + image: debugger-cli:lldb + depends_on: + - base + volumes: + - ./tests:/home/debugger/debugger-cli/tests:ro + - test-results:/home/debugger/results + + # Delve tests - Go + delve: + build: + context: . + dockerfile: docker/delve/Dockerfile + image: debugger-cli:delve + depends_on: + - base + volumes: + - ./tests:/home/debugger/debugger-cli/tests:ro + - test-results:/home/debugger/results + + # debugpy tests - Python + debugpy: + build: + context: . + dockerfile: docker/debugpy/Dockerfile + image: debugger-cli:debugpy + depends_on: + - base + volumes: + - ./tests:/home/debugger/debugger-cli/tests:ro + - test-results:/home/debugger/results + + # js-debug tests - JavaScript/TypeScript + js-debug: + build: + context: . + dockerfile: docker/js-debug/Dockerfile + image: debugger-cli:js-debug + depends_on: + - base + volumes: + - ./tests:/home/debugger/debugger-cli/tests:ro + - test-results:/home/debugger/results + + # GDB tests - C/C++ with GDB + gdb: + build: + context: . + dockerfile: docker/gdb/Dockerfile + image: debugger-cli:gdb + depends_on: + - base + volumes: + - ./tests:/home/debugger/debugger-cli/tests:ro + - test-results:/home/debugger/results + +volumes: + test-results: diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000..927af31 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,93 @@ +# Docker E2E Test Images + +This directory contains Dockerfiles for running comprehensive end-to-end tests across all supported debug adapters. 
+
+## Images
+
+| Image | Debug Adapter | Languages | Base |
+|-------|--------------|-----------|------|
+| `base` | - | Rust (build only) | rust:1.83-bookworm |
+| `lldb` | lldb-dap | C, C++, Rust, Swift | base |
+| `delve` | dlv | Go | base |
+| `debugpy` | debugpy | Python | base |
+| `js-debug` | vscode-js-debug | JavaScript, TypeScript | base |
+| `gdb` | gdb (native DAP) or cdt-gdb-adapter | C, C++ | base |
+| `cuda-gdb` | cuda-gdb | CUDA C/C++ | base (requires nvidia-docker) |
+
+## Usage
+
+### Using Docker Compose
+
+```bash
+# Build all images
+docker compose build
+
+# Run specific adapter tests
+docker compose up lldb
+docker compose up delve
+docker compose up debugpy
+docker compose up js-debug
+docker compose up gdb
+
+# Run all tests
+docker compose up
+```
+
+### Using the test script
+
+```bash
+# Run all tests
+./scripts/run-e2e-tests.sh
+
+# Run specific adapter tests
+./scripts/run-e2e-tests.sh lldb
+./scripts/run-e2e-tests.sh js-debug
+```
+
+### Building individual images
+
+```bash
+# Build base image first
+docker build -t debugger-cli:base -f docker/base/Dockerfile .
+
+# Build adapter-specific image
+docker build -t debugger-cli:lldb -f docker/lldb/Dockerfile .
+
+# Run tests
+docker run --rm debugger-cli:lldb
+```
+
+## Test Coverage
+
+Each image runs the following test types:
+
+1. **Scenario tests** (`debugger test tests/scenarios/*.yml`)
+   - Hello world programs for each language
+   - Breakpoints, stepping, variable inspection
+   - Stack traces, expression evaluation
+
+2. **Integration tests** (`cargo test --test integration`)
+   - Rust test framework tests
+   - More detailed feature coverage
+
+## Adding New Adapters
+
+1. Create a new Dockerfile in `docker/<adapter>/Dockerfile`
+2. Base it on `ghcr.io/akiselev/debugger-cli:base`
+3. Install the debug adapter and language toolchain
+4. Add test scenarios in `tests/scenarios/`
+5. Add to `docker-compose.yml`
+6. Add to CI workflow in `.github/workflows/e2e-tests.yml`
+
+## CUDA-GDB Notes
+
+The CUDA-GDB image requires NVIDIA Container Runtime:
+
+```bash
+# Requires nvidia-docker2 installed
+docker run --gpus all --rm debugger-cli:cuda-gdb
+```
+
+CUDA-GDB supports two modes automatically detected at setup:
+- **Native DAP**: cuda-gdb with GDB 14.1+ and DAP Python bindings
+- **cdt-gdb-adapter bridge**: Older cuda-gdb without native DAP
diff --git a/docker/base/Dockerfile b/docker/base/Dockerfile
new file mode 100644
index 0000000..286ffc5
--- /dev/null
+++ b/docker/base/Dockerfile
@@ -0,0 +1,38 @@
+# Base image for debugger-cli e2e testing
+# Contains Rust toolchain and common build dependencies
+
+FROM rust:1.83-bookworm
+
+# Prevent interactive prompts
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install common build dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    cmake \
+    curl \
+    git \
+    ca-certificates \
+    pkg-config \
+    libssl-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# Create non-root user for testing
+RUN useradd -m -s /bin/bash debugger
+WORKDIR /home/debugger/debugger-cli
+
+# Pre-cache cargo registry
+RUN mkdir -p /home/debugger/.cargo && \
+    chown -R debugger:debugger /home/debugger
+
+# Copy project files
+COPY --chown=debugger:debugger . .
+ +# Build the debugger CLI +RUN cargo build --release && \ + cp target/release/debugger /usr/local/bin/ + +# Switch to non-root user +USER debugger + +ENTRYPOINT ["/bin/bash"] diff --git a/docker/debugpy/Dockerfile b/docker/debugpy/Dockerfile new file mode 100644 index 0000000..7308e7d --- /dev/null +++ b/docker/debugpy/Dockerfile @@ -0,0 +1,28 @@ +# Debugpy debug adapter for Python debugging +# Tests: debugpy adapter with Python scripts + +FROM ghcr.io/akiselev/debugger-cli:base AS base + +USER root + +# Install Python and pip +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 \ + python3-pip \ + python3-venv \ + && rm -rf /var/lib/apt/lists/* + +# Install debugpy globally +RUN pip3 install --break-system-packages debugpy + +# Verify debugpy is available +RUN python3 -c "import debugpy; print(f'debugpy {debugpy.__version__}')" + +USER debugger + +# Set environment for tests +ENV DEBUGGER_ADAPTER=debugpy +ENV TEST_LANGUAGES="python" + +# Default command runs tests +CMD ["bash", "-c", "debugger test tests/scenarios/hello_world_python.yml --verbose"] diff --git a/docker/delve/Dockerfile b/docker/delve/Dockerfile new file mode 100644 index 0000000..95fdd32 --- /dev/null +++ b/docker/delve/Dockerfile @@ -0,0 +1,31 @@ +# Delve debug adapter for Go debugging +# Tests: dlv DAP adapter with TCP transport + +FROM ghcr.io/akiselev/debugger-cli:base AS base + +USER root + +# Install Go +ENV GOLANG_VERSION=1.22.0 +RUN curl -LO https://go.dev/dl/go${GOLANG_VERSION}.linux-amd64.tar.gz && \ + tar -C /usr/local -xzf go${GOLANG_VERSION}.linux-amd64.tar.gz && \ + rm go${GOLANG_VERSION}.linux-amd64.tar.gz + +ENV PATH="/usr/local/go/bin:/home/debugger/go/bin:${PATH}" +ENV GOPATH="/home/debugger/go" + +# Switch to debugger user for go install +USER debugger + +# Install Delve +RUN go install github.com/go-delve/delve/cmd/dlv@latest + +# Verify Delve is available +RUN dlv version + +# Set environment for tests +ENV DEBUGGER_ADAPTER=go +ENV TEST_LANGUAGES="go" + +# Default command runs tests +CMD ["bash", "-c", "go build -gcflags='all=-N -l' -o tests/e2e/test_go tests/e2e/hello_world.go && debugger test tests/scenarios/hello_world_go.yml --verbose"] diff --git a/docker/gdb/Dockerfile b/docker/gdb/Dockerfile new file mode 100644 index 0000000..8ef5757 --- /dev/null +++ b/docker/gdb/Dockerfile @@ -0,0 +1,33 @@ +# GDB debug adapter for C/C++ debugging +# Tests: GDB with native DAP support (GDB >= 14.1) + +FROM ghcr.io/akiselev/debugger-cli:base AS base + +USER root + +# Install GDB with DAP support +# Note: Debian bookworm has GDB 13.x, we need 14.1+ for native DAP +# For older GDB, cdt-gdb-adapter bridge is used +RUN apt-get update && apt-get install -y --no-install-recommends \ + gdb \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Check GDB version +RUN gdb --version | head -1 + +# Install cdt-gdb-adapter as fallback for older GDB +RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \ + apt-get install -y nodejs && \ + npm install -g cdt-gdb-adapter && \ + rm -rf /var/lib/apt/lists/* + +USER debugger + +# Set environment for tests +ENV DEBUGGER_ADAPTER=gdb +ENV TEST_LANGUAGES="c,cpp" + +# Default command compiles and runs tests +CMD ["bash", "-c", "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c && debugger test tests/scenarios/hello_world_c.yml --verbose"] diff --git a/docker/js-debug/Dockerfile b/docker/js-debug/Dockerfile new file mode 100644 index 0000000..e0b7e18 --- /dev/null +++ b/docker/js-debug/Dockerfile @@ -0,0 +1,40 @@ +# js-debug adapter 
for JavaScript/TypeScript debugging
+# Tests: vscode-js-debug adapter with Node.js runtime
+
+FROM ghcr.io/akiselev/debugger-cli:base AS base
+
+USER root
+
+# Install Node.js (LTS)
+RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - && \
+    apt-get install -y nodejs && \
+    rm -rf /var/lib/apt/lists/*
+
+# Verify Node.js
+RUN node --version && npm --version
+
+# Install js-debug globally (the VS Code JavaScript Debugger)
+RUN npm install -g @vscode/js-debug 2>/dev/null || \
+    echo "Will install js-debug via debugger setup"
+
+# Install TypeScript for test fixtures
+RUN npm install -g typescript
+
+USER debugger
+
+# Ensure npm global bin is in PATH
+ENV PATH="/home/debugger/.npm-global/bin:${PATH}"
+ENV NPM_CONFIG_PREFIX="/home/debugger/.npm-global"
+
+# Set environment for tests
+ENV DEBUGGER_ADAPTER=js-debug
+ENV TEST_LANGUAGES="javascript,typescript"
+
+# Build TypeScript fixtures if needed
+RUN cd /home/debugger/debugger-cli/tests/fixtures && \
+    npm install 2>/dev/null || true && \
+    npx tsc 2>/dev/null || true
+
+# Default command runs tests
+CMD ["bash", "-c", "debugger setup js-debug && debugger test tests/scenarios/hello_world_js.yml --verbose && debugger test tests/scenarios/hello_world_ts.yml --verbose"]
diff --git a/docker/lldb/Dockerfile b/docker/lldb/Dockerfile
new file mode 100644
index 0000000..2ff277b
--- /dev/null
+++ b/docker/lldb/Dockerfile
@@ -0,0 +1,26 @@
+# LLDB debug adapter for C/C++/Rust/Swift debugging
+# Tests: lldb-dap adapter with native binaries
+
+FROM ghcr.io/akiselev/debugger-cli:base AS base
+
+USER root
+
+# Install LLDB and development tools
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    lldb \
+    clang \
+    libc++-dev \
+    libc++abi-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# Verify lldb-dap is available
+RUN which lldb-dap || which lldb-vscode || echo "LLDB DAP adapter not found"
+
+USER debugger
+
+# Set environment for tests
+ENV DEBUGGER_ADAPTER=lldb-dap
+ENV TEST_LANGUAGES="c,cpp,rust"
+
+# Default command runs tests
+CMD ["bash", "-c", "debugger test tests/scenarios/hello_world_c.yml --verbose && debugger test tests/scenarios/hello_world_rust.yml --verbose"]
diff --git a/docs/plan-js-debug.md b/docs/plan-js-debug.md
new file mode 100644
index 0000000..2364578
--- /dev/null
+++ b/docs/plan-js-debug.md
@@ -0,0 +1,1419 @@
+# Plan: js-debug Support for JavaScript/TypeScript Debugging
+
+## Overview
+
+This plan implements support for the VS Code JavaScript Debugger (js-debug) in debugger-cli, enabling debugging of JavaScript and TypeScript code. The implementation follows **Approach A (Minimal Change)**: extending the existing `spawn_tcp()` with adapter-aware spawn style detection rather than creating separate spawn methods.
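+
+The port-as-argument flow, reduced to a minimal sketch (a hypothetical `spawn_port_arg` helper using blocking `std` APIs for brevity; the real client uses the tokio equivalents, and the `node`/`debugServerMain.js` arguments are illustrative):
+
+```rust
+use std::net::{TcpListener, TcpStream};
+use std::process::Command;
+use std::{io, thread, time::Duration};
+
+/// Spawn a DAP server that takes its port as a trailing argument
+/// (js-debug style), then connect to it over TCP.
+fn spawn_port_arg(node: &str, server_main: &str) -> io::Result<TcpStream> {
+    // Bind to port 0 so the OS picks a free ephemeral port, then release it;
+    // the adapter binds the same port when it starts. (This is the race
+    // window listed under Known Risks below.)
+    let listener = TcpListener::bind("127.0.0.1:0")?;
+    let port = listener.local_addr()?.port();
+    drop(listener);
+
+    // Unlike Delve's `--listen=host:port` flag, the port is passed as a
+    // positional argument.
+    let _child = Command::new(node)
+        .arg(server_main)
+        .arg(port.to_string())
+        .spawn()?;
+
+    // Give the adapter a moment to start listening; a retry loop around
+    // the connect is the more robust choice in the real implementation.
+    thread::sleep(Duration::from_millis(500));
+    TcpStream::connect(("127.0.0.1", port))
+}
+```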
+ +Key decisions: +- **Installation**: npm package (`@vscode/js-debug`) first, GitHub clone fallback +- **Transport**: TCP with port-as-argument pattern (vs Delve's `--listen` flag) +- **Tests**: Real adapter integration tests with pre-compiled TypeScript + sourcemaps + +## Planning Context + +### Decision Log + +| Decision | Reasoning Chain | +|----------|-----------------| +| Extend spawn_tcp vs new method | js-debug is only second TCP adapter -> creating spawn_tcp_port_arg() adds maintenance burden for one adapter -> extending spawn_tcp with spawn_style config minimizes code paths while supporting both patterns | +| npm package first, GitHub fallback | npm package @vscode/js-debug exists and is maintained -> faster install than clone+build -> fallback to GitHub ensures installation works even if npm fails | +| spawn_style in AdapterConfig | Port-argument pattern differs from --listen flag -> need adapter-specific spawn behavior -> config field is cleaner than adapter name detection | +| Add 6 js-debug fields to LaunchArguments | js-debug needs type, sourceMaps, outFiles, runtimeExecutable, runtimeArgs, skipFiles -> existing pattern uses optional fields with skip_serializing_if -> maintains single LaunchArguments struct | +| Pre-compiled TS with sourcemaps for tests | Direct ts-node execution differs from production -> pre-compiled .js with .map files matches real debugging workflow -> tests validate actual sourcemap resolution | +| Real adapter integration tests | Existing tests use real adapters (lldb-dap, GDB) -> mock tests wouldn't catch DAP protocol issues -> integration tests catch real js-debug behavior | +| Port allocation before spawn | js-debug needs port as argument -> must allocate port before spawning -> use TcpListener::bind("127.0.0.1:0") then extract port | +| Node.js runtime only for v1 | js-debug supports multiple runtimes (pwa-node, pwa-chrome, pwa-extensionHost) -> user confirmed Node.js focus for v1 -> set type_attr to "pwa-node" for all JS/TS files -> browser debugging deferred to future release | +| sourceMaps always enabled for .ts | TypeScript requires sourcemaps for line mapping -> user confirmed auto-enable is preferred experience -> most TS projects use sourcemaps -> auto-enable provides best default -> edge cases can disable via future --no-sourcemaps flag | +| debugServerMain.js path discovery | npm installs to node_modules/@vscode/js-debug/ -> internal structure may vary -> hardcode standard path out/src/debugServerMain.js -> fallback: search for debugServerMain.js in package directory -> verified at installation time | +| type_attr serde rename to "type" | DAP requires field named "type" but Rust keyword collision -> use type_attr with #[serde(rename = "type")] -> overrides camelCase rename_all -> ensures correct DAP field name "type" not "typeAttr" -> tested in M1 serialization test | + +### Rejected Alternatives + +| Alternative | Why Rejected | +|-------------|--------------| +| Separate spawn_tcp_port_arg() method | Creates two TCP spawn paths to maintain; only one adapter currently needs port-arg pattern | +| Wrapper script normalizing output | Adds Node.js file to maintain; extra process layer complicates debugging; not idiomatic Rust | +| Hardcode js-debug detection in spawn_tcp | Adapter name detection is fragile; config-based spawn_style is explicit and extensible | +| ts-node for TypeScript tests | Different execution model than production; wouldn't test sourcemap resolution which is critical for TS debugging | + +### Constraints & Assumptions + 
+- **Node.js required**: js-debug is Node.js-based; installer must verify node/npm availability +- **Existing pattern preservation**: Follow delve.rs/debugpy.rs installer structure exactly +- **TransportMode::Tcp already exists**: No enum changes needed, only spawn behavior differs +- **LaunchArguments extensible**: Existing optional fields pattern with skip_serializing_if + +### Known Risks + +| Risk | Mitigation | Anchor | +|------|------------|--------| +| js-debug stdout format differs from Delve | Parse multiple patterns: "Listening on port X" or silent startup with timeout-based detection | N/A - runtime behavior | +| npm package may not exist or be outdated | GitHub clone fallback with npm install && npm run build | N/A - external dependency | +| Port allocation race condition | Allocate port, immediately spawn adapter, connect with retry loop | src/dap/client.rs:156-264 has similar pattern | +| TypeScript compilation in tests | Include pre-compiled fixtures with .map files; regenerate in test setup if missing | N/A - test infrastructure | + +## Invisible Knowledge + +### Architecture + +``` +User Command: debugger start app.js + | + v +Session::launch() [session.rs] + | + +-- Check adapter_config.transport == Tcp + | + +-- Check adapter_config.spawn_style + | | + | +-- TcpListen -> spawn_tcp() with --listen flag (Delve) + | | + | +-- TcpPortArg -> spawn_tcp() with port argument (js-debug) + | | + | +-- Allocate random port via TcpListener + | +-- Spawn: node debugServerMain.js + | +-- Connect via TcpStream + | + v +DapClient::initialize() + | + +-- LaunchArguments with js-debug fields: + | type: "pwa-node" + | sourceMaps: true + | outFiles: ["/dist/**/*.js"] + | + v +DAP Protocol over TCP +``` + +### Data Flow + +``` +Installation: + npm install @vscode/js-debug -> ~/.local/share/debugger-cli/adapters/js-debug/ + | + +-- Fallback: git clone + npm install + npm run build + | + v + Result: /path/to/out/src/debugServerMain.js + +Launch: + AdapterConfig { + path: "node", + args: ["/path/to/debugServerMain.js"], + transport: Tcp, + spawn_style: TcpPortArg + } + | + v + spawn_tcp() allocates port -> spawns -> connects +``` + +### Why This Structure + +- **spawn_style in config vs adapter detection**: Config is explicit; adapter name matching is fragile when aliases exist +- **Single spawn_tcp with branching**: Avoids code duplication while handling both patterns +- **LaunchArguments union type**: DAP spec allows adapter-specific fields; skip_serializing_if keeps JSON clean + +### Invariants + +1. Port must be allocated BEFORE spawning adapter (js-debug needs it as argument) +2. spawn_style defaults to TcpListen for backward compatibility with Delve. **New TCP adapters MUST specify spawn_style explicitly in config.** Default is NOT safe for all TCP adapters. +3. js-debug tests must use real adapter (mocks wouldn't catch protocol issues) +4. TypeScript test fixtures must have accompanying .map files +5. 
type_attr field MUST use `#[serde(rename = "type")]` to produce correct DAP field name + +### Tradeoffs + +- **Config complexity vs code simplicity**: spawn_style field adds config option but simplifies spawn_tcp logic +- **npm dependency**: Requires Node.js ecosystem but avoids reimplementing js-debug +- **Integration test speed**: Real adapter tests are slower but catch real issues + +## Milestones + +### Milestone 1: Core Infrastructure (spawn_style + LaunchArguments) + +**Files**: +- `src/common/config.rs` +- `src/dap/types.rs` +- `src/dap/client.rs` + +**Flags**: +- `conformance`: Must match existing adapter patterns + +**Requirements**: +- Add `TcpSpawnStyle` enum with `TcpListen` (default) and `TcpPortArg` variants +- Add `spawn_style` field to `AdapterConfig` with default `TcpListen` +- Add js-debug fields to `LaunchArguments`: `type_attr`, `source_maps`, `out_files`, `runtime_executable`, `runtime_args`, `skip_files` +- Modify `spawn_tcp()` to handle `TcpPortArg` pattern: allocate port first, pass as argument + +**Acceptance Criteria**: +- `TcpSpawnStyle::TcpListen` produces `--listen=127.0.0.1:PORT` behavior (Delve unchanged) +- `TcpSpawnStyle::TcpPortArg` allocates port, appends to args, spawns, connects +- LaunchArguments serializes js-debug fields only when set +- LaunchArguments with type_attr="pwa-node" serializes to `{"type": "pwa-node"}` (not `{"typeAttr": "pwa-node"}`) + +**Tests**: +- **Test files**: `tests/integration.rs` (unit test section) +- **Test type**: unit +- **Backing**: default-derived +- **Scenarios**: + - Normal: TcpPortArg allocates port and includes in spawn args + - Normal: TcpListen includes --listen flag (regression) + - Edge: LaunchArguments with js-debug fields serializes correctly + +**Code Intent**: +- `src/common/config.rs`: Add `TcpSpawnStyle` enum after `TransportMode`. Add `spawn_style: TcpSpawnStyle` field to `AdapterConfig` with `#[serde(default)]` +- `src/dap/types.rs`: Add 6 optional fields to `LaunchArguments` after Delve fields: `type_attr` (with `#[serde(rename = "type")]` to produce DAP "type" field), `source_maps`, `out_files`, `runtime_executable`, `runtime_args`, `skip_files`. Use `#[serde(skip_serializing_if = "Option::is_none")]`. Decision: "type_attr serde rename to type" +- `src/dap/client.rs`: In `spawn_tcp()`, add branch at line ~160 to check spawn_style. For TcpPortArg: allocate port via `TcpListener::bind`, extract port, append to args, spawn without --listen flag. 
Decision: "Port allocation before spawn" + +**Code Changes**: + +```diff +--- a/src/common/config.rs ++++ b/src/common/config.rs +@@ -43,6 +43,19 @@ pub enum TransportMode { + Tcp, + } + ++/// TCP adapter spawn style ++#[derive(Debug, Deserialize, Clone, Default, PartialEq)] ++#[serde(rename_all = "lowercase")] ++pub enum TcpSpawnStyle { ++ /// Adapter accepts --listen flag and waits for connection (Delve) ++ #[default] ++ TcpListen, ++ /// Adapter receives port as positional argument (js-debug) ++ TcpPortArg, ++} ++ + /// Configuration for a debug adapter + #[derive(Debug, Deserialize, Clone)] + pub struct AdapterConfig { +@@ -55,6 +68,10 @@ pub struct AdapterConfig { + /// Transport mode for DAP communication + #[serde(default)] + pub transport: TransportMode, ++ ++ /// TCP spawn style (only used when transport is Tcp) ++ #[serde(default)] ++ pub spawn_style: TcpSpawnStyle, + } + + /// Default settings +``` + +```diff +--- a/src/dap/types.rs ++++ b/src/dap/types.rs +@@ -159,6 +159,30 @@ pub struct LaunchArguments { + /// Stop at beginning of main (GDB uses stopAtBeginningOfMainSubprogram instead of stopOnEntry) + #[serde(skip_serializing_if = "Option::is_none")] + pub stop_at_beginning_of_main_subprogram: Option, ++ ++ // === js-debug (JavaScript/TypeScript) specific === ++ /// Runtime type: "pwa-node", "pwa-chrome", "pwa-extensionHost" ++ #[serde(rename = "type", skip_serializing_if = "Option::is_none")] ++ pub type_attr: Option, ++ /// Enable source maps for TypeScript debugging ++ #[serde(skip_serializing_if = "Option::is_none")] ++ pub source_maps: Option, ++ /// Glob patterns for compiled output files ++ #[serde(skip_serializing_if = "Option::is_none")] ++ pub out_files: Option>, ++ /// Path to runtime executable (node) ++ #[serde(skip_serializing_if = "Option::is_none")] ++ pub runtime_executable: Option, ++ /// Arguments passed to runtime executable ++ #[serde(skip_serializing_if = "Option::is_none")] ++ pub runtime_args: Option>, ++ /// Glob patterns for files to skip when debugging ++ #[serde(skip_serializing_if = "Option::is_none")] ++ pub skip_files: Option>, + } + + /// Attach request arguments +``` + +```diff +--- a/src/dap/client.rs ++++ b/src/dap/client.rs +@@ -150,16 +150,44 @@ impl DapClient { + } + + /// Spawn a new DAP adapter that uses TCP for communication (e.g., Delve) +- /// +- /// This spawns the adapter with a --listen flag, waits for it to output +- /// the port it's listening on, then connects via TCP. 
+-    pub async fn spawn_tcp(adapter_path: &Path, args: &[String]) -> Result<Self> {
++    pub async fn spawn_tcp(
++        adapter_path: &Path,
++        args: &[String],
++        spawn_style: &crate::common::config::TcpSpawnStyle,
++    ) -> Result<Self> {
+         use crate::common::parse_listen_address;
+         use tokio::io::{AsyncBufReadExt, BufReader as TokioBufReader};
+ 
+-        // Build command with --listen=127.0.0.1:0 to get a random available port
++        let (mut adapter, addr) = match spawn_style {
++            crate::common::config::TcpSpawnStyle::TcpListen => {
++                let mut cmd = Command::new(adapter_path);
++                cmd.args(args)
++                    .arg("--listen=127.0.0.1:0")
++                    .stdin(Stdio::null())
++                    .stdout(Stdio::piped())
++                    .stderr(Stdio::piped());
++
++                let mut adapter = cmd.spawn().map_err(|e| {
++                    Error::AdapterStartFailed(format!(
++                        "Failed to start {}: {}",
++                        adapter_path.display(),
++                        e
++                    ))
++                })?;
++
++                let stdout = adapter.stdout.take().ok_or_else(|| {
++                    let _ = adapter.start_kill();
++                    Error::AdapterStartFailed("Failed to get adapter stdout".to_string())
++                })?;
++
++                let mut stdout_reader = TokioBufReader::new(stdout);
++                let mut line = String::new();
++
++                let addr_result = tokio::time::timeout(Duration::from_secs(10), async {
++                    loop {
++                        line.clear();
++                        let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| {
++                            Error::AdapterStartFailed(format!("Failed to read adapter output: {}", e))
++                        })?;
++
++                        if bytes_read == 0 {
++                            return Err(Error::AdapterStartFailed(
++                                "Adapter exited before outputting listen address".to_string(),
++                            ));
++                        }
++
++                        tracing::debug!("Adapter output: {}", line.trim());
++
++                        if let Some(addr) = parse_listen_address(&line) {
++                            return Ok(addr);
++                        }
++                    }
++                })
++                .await;
++
++                let addr = match addr_result {
++                    Ok(Ok(addr)) => addr,
++                    Ok(Err(e)) => {
++                        let _ = adapter.start_kill();
++                        return Err(e);
++                    }
++                    Err(_) => {
++                        let _ = adapter.start_kill();
++                        return Err(Error::AdapterStartFailed(
++                            "Timeout waiting for adapter to start listening".to_string(),
++                        ));
++                    }
++                };
++
++                (adapter, addr)
++            }
++            crate::common::config::TcpSpawnStyle::TcpPortArg => {
++                use std::net::TcpListener as StdTcpListener;
++
++                let listener = StdTcpListener::bind("127.0.0.1:0").map_err(|e| {
++                    Error::AdapterStartFailed(format!("Failed to allocate port: {}", e))
++                })?;
++                let port = listener
++                    .local_addr()
++                    .map_err(|e| Error::AdapterStartFailed(format!("Failed to get port: {}", e)))?
++                    .port();
++                drop(listener);
++
++                let addr = format!("127.0.0.1:{}", port);
++
++                let mut cmd = Command::new(adapter_path);
++                let mut full_args = args.to_vec();
++                full_args.push(port.to_string());
++
++                cmd.args(&full_args)
++                    .stdin(Stdio::null())
++                    .stdout(Stdio::piped())
++                    .stderr(Stdio::piped());
++
++                let adapter = cmd.spawn().map_err(|e| {
++                    Error::AdapterStartFailed(format!(
++                        "Failed to start {}: {}",
++                        adapter_path.display(),
++                        e
++                    ))
++                })?;
++
++                tokio::time::sleep(Duration::from_millis(500)).await;
++
++                (adapter, addr)
++            }
++        };
++
++        tracing::info!("Connecting to DAP adapter at {}", addr);
++
+-        let mut cmd = Command::new(adapter_path);
+-        cmd.args(args)
+-            .arg("--listen=127.0.0.1:0")
+-            .stdin(Stdio::null())
+-            .stdout(Stdio::piped())
+-            .stderr(Stdio::piped());
+-
+-        let mut adapter = cmd.spawn().map_err(|e| {
+-            Error::AdapterStartFailed(format!(
+-                "Failed to start {}: {}",
+-                adapter_path.display(),
+-                e
+-            ))
+-        })?;
+-
+-        // Read stdout to find the listening address
+-        // Delve outputs: "DAP server listening at: 127.0.0.1:PORT"
+-        let stdout = adapter.stdout.take().ok_or_else(|| {
+-            let _ = adapter.start_kill();
+-            Error::AdapterStartFailed("Failed to get adapter stdout".to_string())
+-        })?;
+-
+-        let mut stdout_reader = TokioBufReader::new(stdout);
+-        let mut line = String::new();
+-
+-        // Wait for the "listening at" message with timeout
+-        let addr_result = tokio::time::timeout(Duration::from_secs(10), async {
+-            loop {
+-                line.clear();
+-                let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| {
+-                    Error::AdapterStartFailed(format!("Failed to read adapter output: {}", e))
+-                })?;
+-
+-                if bytes_read == 0 {
+-                    return Err(Error::AdapterStartFailed(
+-                        "Adapter exited before outputting listen address".to_string(),
+-                    ));
+-                }
+-
+-                tracing::debug!("Delve output: {}", line.trim());
+-
+-                // Look for the listening address in the output
+-                if let Some(addr) = parse_listen_address(&line) {
+-                    return Ok(addr);
+-                }
+-            }
+-        })
+-        .await;
+-
+-        // Handle timeout or error - cleanup adapter before returning
+-        let addr = match addr_result {
+-            Ok(Ok(addr)) => addr,
+-            Ok(Err(e)) => {
+-                let _ = adapter.start_kill();
+-                return Err(e);
+-            }
+-            Err(_) => {
+-                let _ = adapter.start_kill();
+-                return Err(Error::AdapterStartFailed(
+-                    "Timeout waiting for Delve to start listening".to_string(),
+-                ));
+-            }
+-        };
+-
+-        tracing::info!("Connecting to Delve DAP server at {}", addr);
+-
+-        // Connect to the TCP port - cleanup adapter on failure
+         let stream = match TcpStream::connect(&addr).await {
+             Ok(s) => s,
+             Err(e) => {
+                 let _ = adapter.start_kill();
+                 return Err(Error::AdapterStartFailed(format!(
+-                    "Failed to connect to Delve at {}: {}",
++                    "Failed to connect to adapter at {}: {}",
+                     addr, e
+                 )));
+             }
+         };
+ 
+         let (read_half, write_half) = tokio::io::split(stream);
+```
+
+---
+
+### Milestone 2: js-debug Installer
+
+**Files**:
+- `src/setup/adapters/js_debug.rs` (new)
+- `src/setup/adapters/mod.rs`
+- `src/setup/registry.rs`
+- `src/setup/detector.rs`
+
+**Flags**:
+- `conformance`: Must match delve.rs/debugpy.rs patterns exactly
+
+**Requirements**:
+- Create `JsDebugInstaller` implementing `Installer` trait
+- Support npm package installation with GitHub clone fallback
+- Verify Node.js availability before installation
+- Register in adapter registry with languages: `["javascript", "typescript"]`
+- Update project detector to return `"js-debug"` for JS/TS projects
+
+**Acceptance Criteria**:
+- `debugger setup list` shows js-debug adapter
+- `debugger setup install js-debug` installs via npm or GitHub
+- `debugger setup verify js-debug` confirms DAP communication works
+- Project detection in JS/TS directories suggests js-debug
+
+**Tests**:
+- **Test files**: `tests/integration.rs`
+- **Test type**: integration
+- **Backing**: user-specified (real adapter)
+- **Scenarios**:
+  - Normal: Fresh install via npm succeeds
+  - Normal: Verify confirms adapter responds to DAP initialize
+  - Edge: npm failure triggers GitHub fallback
+  - Error: Missing Node.js returns clear error
+
+**Code Intent**:
+- New `src/setup/adapters/js_debug.rs`: Create `JsDebugInstaller` struct. Implement `Installer` trait with: `info()` returning DebuggerInfo for js-debug, `status()` checking adapters_dir/js-debug for out/src/debugServerMain.js (hardcoded path, fallback search for debugServerMain.js), `best_method()` trying npm then GitHub, `install()` running npm install or git clone, `verify()` using `verify_dap_adapter_tcp` variant. Decision: "debugServerMain.js path discovery"
Decision: "debugServerMain.js path discovery" +- `src/setup/adapters/mod.rs`: Add `pub mod js_debug;` +- `src/setup/registry.rs`: Add DebuggerInfo entry in DEBUGGERS array. Add match arm in `get_installer()` for "js-debug" +- `src/setup/detector.rs`: Change line ~55 from `vec![]` to `vec!["js-debug"]` for JS/TS project types + +**Code Changes**: + +New file `src/setup/adapters/js_debug.rs`: + +```rust +//! js-debug installer +//! +//! Installs the VS Code JavaScript Debugger (js-debug) with DAP support. + +use crate::common::config::TcpSpawnStyle; +use crate::common::{Error, Result}; +use crate::setup::installer::{ + adapters_dir, download_file, ensure_adapters_dir, extract_tar_gz, get_github_release, + run_command_args, write_version_file, InstallMethod, InstallOptions, InstallResult, + InstallStatus, Installer, PackageManager, +}; +use crate::setup::registry::{DebuggerInfo, Platform}; +use crate::setup::verifier::{verify_dap_adapter_tcp, VerifyResult}; +use async_trait::async_trait; +use std::path::PathBuf; + +static INFO: DebuggerInfo = DebuggerInfo { + id: "js-debug", + name: "js-debug", + languages: &["javascript", "typescript"], + platforms: &[Platform::Linux, Platform::MacOS, Platform::Windows], + description: "VS Code JavaScript Debugger with DAP support", + primary: true, +}; + +const GITHUB_REPO: &str = "microsoft/vscode-js-debug"; +const NPM_PACKAGE: &str = "@vscode/js-debug"; + +pub struct JsDebugInstaller; + +#[async_trait] +impl Installer for JsDebugInstaller { + fn info(&self) -> &DebuggerInfo { + &INFO + } + + async fn status(&self) -> Result { + let adapter_dir = adapters_dir().join("js-debug"); + + let main_path = adapter_dir.join("out").join("src").join("debugServerMain.js"); + if main_path.exists() { + let version = read_version_file(&adapter_dir); + return Ok(InstallStatus::Installed { + path: main_path, + version, + }); + } + + if adapter_dir.exists() { + if let Some(path) = find_debug_server_main(&adapter_dir) { + let version = read_version_file(&adapter_dir); + return Ok(InstallStatus::Installed { + path, + version, + }); + } + } + + Ok(InstallStatus::NotInstalled) + } + + async fn best_method(&self) -> Result { + let managers = PackageManager::detect(); + + if managers.contains(&PackageManager::Npm) { + return Ok(InstallMethod::LanguagePackage { + tool: "npm".to_string(), + package: NPM_PACKAGE.to_string(), + }); + } + + Ok(InstallMethod::GitHubRelease { + repo: GITHUB_REPO.to_string(), + asset_pattern: String::new(), + }) + } + + async fn install(&self, opts: InstallOptions) -> Result { + let method = self.best_method().await?; + + match method { + InstallMethod::LanguagePackage { tool, package } => { + install_via_npm(&tool, &package, &opts).await + } + InstallMethod::GitHubRelease { .. } => install_from_github(&opts).await, + _ => Err(Error::Internal("Unexpected installation method".to_string())), + } + } + + async fn verify(&self) -> Result { + let status = self.status().await?; + + let path = match status { + InstallStatus::Installed { path, .. 
} => path, + InstallStatus::NotInstalled => { + return Ok(VerifyResult { + success: false, + capabilities: None, + error: Some("Not installed".to_string()), + }); + } + }; + + let node_path = which::which("node").map_err(|_| { + Error::Internal("Node.js not found in PATH".to_string()) + })?; + + verify_dap_adapter_tcp(&node_path, &[path.to_string_lossy().to_string()], TcpSpawnStyle::TcpPortArg).await + } +} + +async fn install_via_npm( + _tool: &str, + package: &str, + opts: &InstallOptions, +) -> Result<InstallResult> { + ensure_adapters_dir()?; + let adapter_dir = adapters_dir().join("js-debug"); + std::fs::create_dir_all(&adapter_dir).map_err(|e| { + Error::Internal(format!("Failed to create adapter directory: {}", e)) + })?; + + if !opts.quiet { + println!("Installing {} via npm...", package); + } + + run_command_args( + "npm", + &["install", "--prefix", adapter_dir.to_str().unwrap(), package], + opts, + ) + .await?; + + let main_path = adapter_dir + .join("node_modules") + .join(package) + .join("out") + .join("src") + .join("debugServerMain.js"); + + if !main_path.exists() { + if let Some(fallback_path) = find_debug_server_main(&adapter_dir) { + write_version_file(&adapter_dir, "npm")?; + return Ok(InstallResult { + path: fallback_path, + version: Some("npm".to_string()), + args: vec![], + }); + } + return Err(Error::Internal( + "debugServerMain.js not found after npm install".to_string(), + )); + } + + write_version_file(&adapter_dir, "npm")?; + + Ok(InstallResult { + path: main_path, + version: Some("npm".to_string()), + args: vec![], + }) +} + +async fn install_from_github(opts: &InstallOptions) -> Result<InstallResult> { + ensure_adapters_dir()?; + let adapter_dir = adapters_dir().join("js-debug"); + + if !opts.quiet { + println!("Installing js-debug from GitHub..."); + } + + run_command_args( + "git", + &[ + "clone", + "--depth", + "1", + "https://github.com/microsoft/vscode-js-debug.git", + adapter_dir.to_str().unwrap(), + ], + opts, + ) + .await?; + + if !opts.quiet { + println!("Installing dependencies..."); + } + + run_command_args("npm", &["install", "--prefix", adapter_dir.to_str().unwrap()], opts).await?; + + if !opts.quiet { + println!("Building js-debug..."); + } + + run_command_args( + "npm", + &["run", "build", "--prefix", adapter_dir.to_str().unwrap()], + opts, + ) + .await?; + + let main_path = adapter_dir.join("out").join("src").join("debugServerMain.js"); + + if !main_path.exists() { + if let Some(fallback_path) = find_debug_server_main(&adapter_dir) { + write_version_file(&adapter_dir, "git")?; + return Ok(InstallResult { + path: fallback_path, + version: Some("git".to_string()), + args: vec![], + }); + } + return Err(Error::Internal( + "debugServerMain.js not found after build".to_string(), + )); + } + + write_version_file(&adapter_dir, "git")?; + + Ok(InstallResult { + path: main_path, + version: Some("git".to_string()), + args: vec![], + }) +} + +fn find_debug_server_main(base_dir: &std::path::Path) -> Option<PathBuf> { + let patterns = [ + "out/src/debugServerMain.js", + "dist/src/debugServerMain.js", + "debugServerMain.js", + ]; + + for pattern in &patterns { + let path = base_dir.join(pattern); + if path.exists() { + return Some(path); + } + } + + None +} + +fn read_version_file(adapter_dir: &std::path::Path) -> Option<String> { + let version_file = adapter_dir.join(".version"); + std::fs::read_to_string(version_file).ok() +} +```
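For orientation, the sketch below shows the lifecycle the setup commands are expected to drive through this installer. It is illustrative only: it assumes `get_installer` returns the trait object registered below, and that the `InstallOptions` fields used above (`force`, `quiet`, `version`) are the whole struct.

```rust
use crate::common::Result;
use crate::setup::installer::{InstallOptions, InstallStatus};
use crate::setup::registry::get_installer;

/// Hypothetical driver: mirrors `debugger setup install js-debug`
/// followed by `debugger setup verify js-debug`.
async fn ensure_js_debug() -> Result<()> {
    let installer = get_installer("js-debug").expect("js-debug is in DEBUGGERS");

    if matches!(installer.status().await?, InstallStatus::NotInstalled) {
        // install() consults best_method(): npm when available, GitHub otherwise
        let opts = InstallOptions { force: false, quiet: false, version: None };
        let result = installer.install(opts).await?;
        println!("js-debug installed at {}", result.path.display());
    }

    // Spawns node with debugServerMain.js and sends a DAP initialize over TCP
    let verified = installer.verify().await?;
    assert!(verified.success, "js-debug did not answer DAP initialize");
    Ok(())
}
```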
+```diff +--- a/src/setup/adapters/mod.rs ++++ b/src/setup/adapters/mod.rs +@@ -9,3 +9,4 @@ pub mod debugpy; + pub mod delve; + pub mod gdb_common; + pub mod gdb; ++pub mod js_debug; + pub mod lldb; +``` + +```diff +--- a/src/setup/registry.rs ++++ b/src/setup/registry.rs +@@ -109,6 +109,13 @@ static DEBUGGERS: &[DebuggerInfo] = &[ + description: "Go debugger with DAP support", + primary: true, + }, ++ DebuggerInfo { ++ id: "js-debug", ++ name: "js-debug", ++ languages: &["javascript", "typescript"], ++ platforms: &[Platform::Linux, Platform::MacOS, Platform::Windows], ++ description: "VS Code JavaScript Debugger with DAP support", ++ primary: true, ++ }, + ]; + + /// Get all registered debuggers +@@ -147,6 +154,7 @@ pub fn get_installer(id: &str) -> Option<Arc<dyn Installer>> { + "codelldb" => Some(Arc::new(adapters::codelldb::CodeLldbInstaller)), + "python" => Some(Arc::new(adapters::debugpy::DebugpyInstaller)), + "go" => Some(Arc::new(adapters::delve::DelveInstaller)), ++ "js-debug" => Some(Arc::new(adapters::js_debug::JsDebugInstaller)), + _ => None, + } + } +``` + +```diff +--- a/src/setup/detector.rs ++++ b/src/setup/detector.rs +@@ -97,7 +97,7 @@ pub fn debuggers_for_project(project: &ProjectType) -> Vec<&'static str> { + ProjectType::Cuda => vec!["cuda-gdb"], + ProjectType::Go => vec!["go"], + ProjectType::Python => vec!["python"], +- ProjectType::JavaScript | ProjectType::TypeScript => vec![], ++ ProjectType::JavaScript | ProjectType::TypeScript => vec!["js-debug"], + ProjectType::C | ProjectType::Cpp => vec!["lldb", "codelldb"], + ProjectType::CSharp => vec![], + ProjectType::Java => vec![], +``` + + +--- + +### Milestone 3: Session Launch Integration + +**Files**: +- `src/daemon/session.rs` + +**Flags**: +- `needs-rationale`: js-debug field population logic needs WHY comments + +**Requirements**: +- Detect js-debug adapter and populate js-debug-specific LaunchArguments +- Set `type_attr: "pwa-node"` for Node.js debugging +- Enable `source_maps: true` by default for .ts files +- Pass spawn_style to DapClient spawn + +**Acceptance Criteria**: +- `debugger start app.js --adapter js-debug` launches with correct LaunchArguments +- TypeScript files get `source_maps: true` automatically +- spawn_tcp receives correct spawn_style from config + +**Tests**: +- **Test files**: `tests/integration.rs` +- **Test type**: integration +- **Backing**: user-specified +- **Scenarios**: + - Normal: JavaScript file launches with type: pwa-node + - Normal: TypeScript file gets sourceMaps: true + - Edge: Custom outFiles passed through + +**Code Intent**: +- `src/daemon/session.rs`: Near line 178, add detection block for js-debug (similar to is_python, is_go). When adapter is js-debug or file extension is .js/.ts: set `launch_args.type_attr = Some("pwa-node".to_string())` (Decision: "Node.js runtime only for v1"). For .ts files: set `launch_args.source_maps = Some(true)` (Decision: "sourceMaps always enabled for .ts"). Near line 154, pass `adapter_config.spawn_style` to spawn decision. + +**Code Changes**: + +```diff +--- a/src/daemon/session.rs ++++ b/src/daemon/session.rs +@@ -155,7 +155,7 @@ impl Session { + TransportMode::Stdio => { + DapClient::spawn(&adapter_config.path, &adapter_config.args).await? + } + TransportMode::Tcp => { +- DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args).await? ++ DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args, &adapter_config.spawn_style).await?
+ } + }; + +@@ -182,6 +182,9 @@ impl Session { + let is_go = adapter_name == "go" + || adapter_name == "delve" + || adapter_name == "dlv"; ++ let is_javascript = adapter_name == "js-debug" ++ || program.extension().map(|e| e == "js").unwrap_or(false); ++ let is_typescript = program.extension().map(|e| e == "ts").unwrap_or(false); + + let launch_args = LaunchArguments { + program: program.to_string_lossy().into_owned(), + args: args.clone(), +@@ -203,6 +206,13 @@ impl Session { + stop_at_entry: if is_go && stop_on_entry { Some(true) } else { None }, + // GDB-based adapters (gdb, cuda-gdb) use stopAtBeginningOfMainSubprogram + stop_at_beginning_of_main_subprogram: if (adapter_name == "gdb" || adapter_name == "cuda-gdb") && stop_on_entry { Some(true) } else { None }, ++ // js-debug specific ++ type_attr: if is_javascript || is_typescript { Some("pwa-node".to_string()) } else { None }, ++ source_maps: if is_typescript { Some(true) } else { None }, ++ out_files: None, ++ runtime_executable: None, ++ runtime_args: None, ++ skip_files: None, + }; + + tracing::debug!( +``` + + +--- + +### Milestone 4: Test Fixtures + +**Files**: +- `tests/fixtures/simple.js` (new) +- `tests/fixtures/simple.ts` (new) +- `tests/fixtures/tsconfig.json` (new) +- `tests/fixtures/dist/simple.js` (new, compiled) +- `tests/fixtures/dist/simple.js.map` (new, sourcemap) + +**Requirements**: +- JavaScript fixture with BREAKPOINT_MARKER comments matching C/Python fixtures +- TypeScript fixture with same structure, compiled to dist/ +- Sourcemap enabling TypeScript line mapping + +**Acceptance Criteria**: +- `simple.js` has markers: main_start, before_add, before_factorial, add_body +- `simple.ts` has same markers, compiles to `dist/simple.js` with `.map` +- Sourcemap correctly maps dist/simple.js lines to simple.ts lines + +**Tests**: +- Skip: Fixtures are test infrastructure, validated by integration tests + +**Code Intent**: +- New `tests/fixtures/simple.js`: JavaScript file matching structure of simple.c/simple.py with add() and factorial() functions, BREAKPOINT_MARKER comments +- New `tests/fixtures/simple.ts`: TypeScript version with type annotations +- New `tests/fixtures/tsconfig.json`: Compiler config targeting dist/, generating sourcemaps +- Pre-compiled `tests/fixtures/dist/simple.js` and `.map`: Committed to repo so tests don't require tsc + +**Code Changes**: + +New file `tests/fixtures/simple.js`: + +```javascript +#!/usr/bin/env node +// Simple test program for debugger integration tests + +function add(a, b) { + // BREAKPOINT_MARKER: add_body + const result = a + b; + return result; +} + +function factorial(n) { + // BREAKPOINT_MARKER: factorial_body + if (n <= 1) { + return 1; + } + return n * factorial(n - 1); +} + +function main() { + // BREAKPOINT_MARKER: main_start + const x = 10; + const y = 20; + + // BREAKPOINT_MARKER: before_add + const sumResult = add(x, y); + console.log(`Sum: ${sumResult}`); + + // BREAKPOINT_MARKER: before_factorial + const fact = factorial(5); + console.log(`Factorial: ${fact}`); + + // BREAKPOINT_MARKER: before_exit + return 0; +} + +// BREAKPOINT_MARKER: entry_point +process.exit(main()); +``` + +New file `tests/fixtures/simple.ts`: + +```typescript +#!/usr/bin/env node +// Simple test program for debugger integration tests + +function add(a: number, b: number): number { + // BREAKPOINT_MARKER: add_body + const result: number = a + b; + return result; +} + +function factorial(n: number): number { + // BREAKPOINT_MARKER: factorial_body + if (n <= 1) { + return 1; + }
+ return n * factorial(n - 1); +} + +function main(): number { + // BREAKPOINT_MARKER: main_start + const x: number = 10; + const y: number = 20; + + // BREAKPOINT_MARKER: before_add + const sumResult: number = add(x, y); + console.log(`Sum: ${sumResult}`); + + // BREAKPOINT_MARKER: before_factorial + const fact: number = factorial(5); + console.log(`Factorial: ${fact}`); + + // BREAKPOINT_MARKER: before_exit + return 0; +} + +// BREAKPOINT_MARKER: entry_point +process.exit(main()); +``` + +New file `tests/fixtures/tsconfig.json`: + +```json +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "outDir": "./dist", + "rootDir": "./", + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["simple.ts"], + "exclude": ["node_modules", "dist"] +} +``` + +New file `tests/fixtures/dist/simple.js` (pre-compiled): + +```javascript +#!/usr/bin/env node +"use strict"; +function add(a, b) { + const result = a + b; + return result; +} +function factorial(n) { + if (n <= 1) { + return 1; + } + return n * factorial(n - 1); +} +function main() { + const x = 10; + const y = 20; + const sumResult = add(x, y); + console.log(`Sum: ${sumResult}`); + const fact = factorial(5); + console.log(`Factorial: ${fact}`); + return 0; +} +process.exit(main()); +//# sourceMappingURL=simple.js.map +``` + +New file `tests/fixtures/dist/simple.js.map`: + +```json +{"version":3,"file":"simple.js","sourceRoot":"","sources":["../simple.ts"],"names":[],"mappings":";;AAEA,SAAS,GAAG,CAAC,CAAS,EAAE,CAAS;IAE7B,MAAM,MAAM,GAAW,CAAC,GAAG,CAAC,CAAC;IAC7B,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,SAAS,SAAS,CAAC,CAAS;IAExB,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACT,OAAO,CAAC,CAAC;IACb,CAAC;IACD,OAAO,CAAC,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAChC,CAAC;AAED,SAAS,IAAI;IAET,MAAM,CAAC,GAAW,EAAE,CAAC;IACrB,MAAM,CAAC,GAAW,EAAE,CAAC;IAGrB,MAAM,SAAS,GAAW,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACpC,OAAO,CAAC,GAAG,CAAC,QAAQ,SAAS,EAAE,CAAC,CAAC;IAGjC,MAAM,IAAI,GAAW,SAAS,CAAC,CAAC,CAAC,CAAC;IAClC,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC,CAAC;IAGlC,OAAO,CAAC,CAAC;AACb,CAAC;AAGD,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC"} +``` + + +--- + +### Milestone 5: Integration Tests + +**Files**: +- `tests/integration.rs` + +**Flags**: +- `conformance`: Must match existing test patterns (lldb-dap tests) + +**Requirements**: +- `js_debug_available()` helper function checking for js-debug installation +- `test_basic_debugging_workflow_js()` - start, breakpoint, continue, stop +- `test_basic_debugging_workflow_ts()` - TypeScript with sourcemap verification +- `test_stepping_js()` - step in, step out, step over +- `test_expression_evaluation_js()` - evaluate expressions at breakpoint +- `test_sourcemap_resolution_ts()` - verify breakpoint hits in .ts not .js + +**Acceptance Criteria**: +- All js-debug tests pass when adapter is installed +- Tests skip gracefully when js-debug not available +- TypeScript test verifies sourcemap: breakpoint in .ts, stops at correct line + +**Tests**: +- **Test files**: `tests/integration.rs` +- **Test type**: integration +- **Backing**: user-specified (real adapter, pre-compiled TS) +- **Scenarios**: + - Normal: JavaScript breakpoint hit and variable inspection + - Normal: TypeScript breakpoint with sourcemap resolution + - Normal: Step into function, step out + - Edge: Async function debugging (if supported) + - Skip: Tests auto-skip if js-debug not installed + +**Code Intent**: +- `tests/integration.rs`: Add `js_debug_available()` helper 
similar to `lldb_dap_available()`. Add 5 test functions with `#[ignore = "requires js-debug"]` attribute. Use `TestContext::create_config_with_tcp()` for js-debug config with transport=tcp. Follow existing test patterns: cleanup_daemon, start, break, continue, await, assertions, stop + +**Code Changes**: + +```diff +--- a/tests/integration.rs ++++ b/tests/integration.rs +@@ -340,6 +340,26 @@ fn lldb_dap_available() -> Option<PathBuf> { + } + } + ++/// Check if js-debug is available ++fn js_debug_available() -> Option<PathBuf> { ++ if which::which("node").is_ok() { ++ let adapters_dir = dirs::data_local_dir()?.join("debugger-cli").join("adapters"); ++ let js_debug_dir = adapters_dir.join("js-debug"); ++ let main_path = js_debug_dir.join("out").join("src").join("debugServerMain.js"); ++ ++ if main_path.exists() { ++ return Some(main_path); ++ } ++ ++ let npm_path = js_debug_dir.join("node_modules").join("@vscode").join("js-debug").join("out").join("src").join("debugServerMain.js"); ++ if npm_path.exists() { ++ return Some(npm_path); ++ } ++ } ++ ++ None ++} ++ + /// Check if GDB with DAP support is available (requires GDB ≥14.1) + fn gdb_available() -> Option<PathBuf> { + if let Ok(gdb_path) = which::which("gdb") { +@@ -1192,3 +1212,233 @@ fn test_cuda_gdb_kernel_debugging() { + let _ = ctx.run_debugger(&["stop"]); + } ++ ++#[test] ++#[ignore = "requires js-debug"] ++fn test_basic_debugging_workflow_js() { ++ let js_debug_path = match js_debug_available() { ++ Some(path) => path, ++ None => { ++ eprintln!("Skipping test: js-debug not available"); ++ return; ++ } ++ }; ++ ++ let node_path = match which::which("node") { ++ Ok(path) => path, ++ Err(_) => { ++ eprintln!("Skipping test: Node.js not available"); ++ return; ++ } ++ }; ++ ++ let mut ctx = TestContext::new("basic_workflow_js"); ++ ctx.create_config_with_tcp( ++ "js-debug", ++ node_path.to_str().unwrap(), ++ &[js_debug_path.to_str().unwrap()], ++ "tcp-port-arg", ++ ); ++ ++ let js_file = ctx.fixtures_dir.join("simple.js"); ++ let markers = ctx.find_breakpoint_markers(&js_file); ++ let main_start_line = markers.get("main_start").expect("Missing main_start marker"); ++ ++ ctx.cleanup_daemon(); ++ ++ let output = ctx.run_debugger_ok(&[ ++ "start", ++ js_file.to_str().unwrap(), ++ ]); ++ assert!(output.contains("Started debugging") || output.contains("session")); ++ ++ let bp_location = format!("simple.js:{}", main_start_line); ++ let output = ctx.run_debugger_ok(&["break", &bp_location]); ++ assert!(output.contains("Breakpoint") || output.contains("breakpoint")); ++ ++ let output = ctx.run_debugger_ok(&["continue"]); ++ assert!(output.contains("Continuing") || output.contains("running")); ++ ++ let output = ctx.run_debugger_ok(&["await", "--timeout", "30"]); ++ assert!( ++ output.contains("Stopped") || output.contains("breakpoint"), ++ "Expected stop at breakpoint: {}", ++ output ++ ); ++ ++ let output = ctx.run_debugger_ok(&["locals"]); ++ assert!( ++ output.contains("x") || output.contains("Local"), ++ "Expected locals output: {}", ++ output ++ ); ++ ++ let _ = ctx.run_debugger(&["continue"]); ++ let _ = ctx.run_debugger(&["await", "--timeout", "10"]); ++ let _ = ctx.run_debugger(&["stop"]); ++} ++ ++#[test] ++#[ignore = "requires js-debug"] ++fn test_basic_debugging_workflow_ts() { ++ let js_debug_path = match js_debug_available() { ++ Some(path) => path, ++ None => { ++ eprintln!("Skipping test: js-debug not available"); ++ return; ++ } ++ }; ++ ++ let node_path = match which::which("node") { ++ Ok(path) => path, ++ Err(_) => { ++ 
eprintln!("Skipping test: Node.js not available"); ++ return; ++ } ++ }; ++ ++ let mut ctx = TestContext::new("basic_workflow_ts"); ++ ctx.create_config_with_tcp( ++ "js-debug", ++ node_path.to_str().unwrap(), ++ &[js_debug_path.to_str().unwrap()], ++ "tcpportarg", ++ ); ++ ++ let ts_file = ctx.fixtures_dir.join("simple.ts"); ++ let markers = ctx.find_breakpoint_markers(&ts_file); ++ let main_start_line = markers.get("main_start").expect("Missing main_start marker"); ++ ++ ctx.cleanup_daemon(); ++ ++ let output = ctx.run_debugger_ok(&[ ++ "start", ++ ts_file.to_str().unwrap(), ++ ]); ++ assert!(output.contains("Started debugging") || output.contains("session")); ++ ++ let bp_location = format!("simple.ts:{}", main_start_line); ++ let output = ctx.run_debugger_ok(&["break", &bp_location]); ++ assert!(output.contains("Breakpoint") || output.contains("breakpoint")); ++ ++ let output = ctx.run_debugger_ok(&["continue"]); ++ assert!(output.contains("Continuing") || output.contains("running")); ++ ++ let output = ctx.run_debugger_ok(&["await", "--timeout", "30"]); ++ assert!( ++ output.contains("Stopped") || output.contains("breakpoint"), ++ "Expected stop at breakpoint: {}", ++ output ++ ); ++ ++ let _ = ctx.run_debugger(&["continue"]); ++ let _ = ctx.run_debugger(&["await", "--timeout", "10"]); ++ let _ = ctx.run_debugger(&["stop"]); ++} ++ ++#[test] ++#[ignore = "requires js-debug"] ++fn test_stepping_js() { ++ let js_debug_path = match js_debug_available() { ++ Some(path) => path, ++ None => { ++ eprintln!("Skipping test: js-debug not available"); ++ return; ++ } ++ }; ++ ++ let node_path = match which::which("node") { ++ Ok(path) => path, ++ Err(_) => { ++ eprintln!("Skipping test: Node.js not available"); ++ return; ++ } ++ }; ++ ++ let mut ctx = TestContext::new("stepping_js"); ++ ctx.create_config_with_tcp( ++ "js-debug", ++ node_path.to_str().unwrap(), ++ &[js_debug_path.to_str().unwrap()], ++ "tcpportarg", ++ ); ++ ++ let js_file = ctx.fixtures_dir.join("simple.js"); ++ let markers = ctx.find_breakpoint_markers(&js_file); ++ let before_add_line = markers.get("before_add").expect("Missing before_add marker"); ++ ++ ctx.cleanup_daemon(); ++ ++ let output = ctx.run_debugger_ok(&["start", js_file.to_str().unwrap()]); ++ assert!(output.contains("Started debugging") || output.contains("session")); ++ ++ let bp_location = format!("simple.js:{}", before_add_line); ++ let _ = ctx.run_debugger_ok(&["break", &bp_location]); ++ let _ = ctx.run_debugger_ok(&["continue"]); ++ let _ = ctx.run_debugger_ok(&["await", "--timeout", "30"]); ++ ++ let output = ctx.run_debugger_ok(&["step-in"]); ++ assert!(output.contains("Stepped") || output.contains("step")); ++ ++ let output = ctx.run_debugger_ok(&["step-out"]); ++ assert!(output.contains("Stepped") || output.contains("step")); ++ ++ let _ = ctx.run_debugger(&["continue"]); ++ let _ = ctx.run_debugger(&["stop"]); ++} ++ ++#[test] ++#[ignore = "requires js-debug"] ++fn test_expression_evaluation_js() { ++ let js_debug_path = match js_debug_available() { ++ Some(path) => path, ++ None => { ++ eprintln!("Skipping test: js-debug not available"); ++ return; ++ } ++ }; ++ ++ let node_path = match which::which("node") { ++ Ok(path) => path, ++ Err(_) => { ++ eprintln!("Skipping test: Node.js not available"); ++ return; ++ } ++ }; ++ ++ let mut ctx = TestContext::new("expression_eval_js"); ++ ctx.create_config_with_tcp( ++ "js-debug", ++ node_path.to_str().unwrap(), ++ &[js_debug_path.to_str().unwrap()], ++ "tcpportarg", ++ ); ++ ++ let js_file = 
ctx.fixtures_dir.join("simple.js"); ++ let markers = ctx.find_breakpoint_markers(&js_file); ++ let main_start_line = markers.get("main_start").expect("Missing main_start marker"); ++ ++ ctx.cleanup_daemon(); ++ ++ let output = ctx.run_debugger_ok(&["start", js_file.to_str().unwrap()]); ++ assert!(output.contains("Started debugging") || output.contains("session")); ++ ++ let bp_location = format!("simple.js:{}", main_start_line); ++ let _ = ctx.run_debugger_ok(&["break", &bp_location]); ++ let _ = ctx.run_debugger_ok(&["continue"]); ++ let _ = ctx.run_debugger_ok(&["await", "--timeout", "30"]); ++ ++ let output = ctx.run_debugger_ok(&["eval", "x + y"]); ++ assert!( ++ output.contains("30") || output.contains("result"), ++ "Expected evaluation result: {}", ++ output ++ ); ++ ++ let _ = ctx.run_debugger(&["continue"]); ++ let _ = ctx.run_debugger(&["stop"]); ++} +``` + +Add helper method to TestContext: + +```diff +--- a/tests/integration.rs ++++ b/tests/integration.rs +@@ -145,6 +145,32 @@ impl TestContext { + self.create_config_with_args(adapter_name, adapter_path, &[]); + } + ++ /// Create a config file for a TCP adapter ++ fn create_config_with_tcp( ++ &self, ++ adapter_name: &str, ++ adapter_path: &str, ++ args: &[&str], ++ spawn_style: &str, ++ ) { ++ let args_str = args.iter() ++ .map(|a| format!("\"{}\"", a)) ++ .collect::>() ++ .join(", "); ++ let config_content = format!( ++ r#" ++[adapters.{adapter_name}] ++path = "{adapter_path}" ++args = [{args_str}] ++transport = "tcp" ++spawn_style = "{spawn_style}" ++ ++[defaults] ++adapter = "{adapter_name}" ++"#, ++ ); ++ // ... rest same as create_config_with_args ++ } ++ + /// Create a config file for the test with custom args + fn create_config_with_args(&self, adapter_name: &str, adapter_path: &str, args: &[&str]) { + let args_str = args.iter() +``` + + +--- + +### Milestone 6: Documentation + +**Delegated to**: @agent-technical-writer (mode: post-implementation) + +**Source**: `## Invisible Knowledge` section of this plan + +**Files**: +- `src/setup/adapters/CLAUDE.md` (update) +- `src/setup/adapters/README.md` (new or update) +- `src/dap/CLAUDE.md` (update if exists) + +**Requirements**: +Delegate to Technical Writer. 
+ +--- + +### Milestone 6: Documentation + +**Delegated to**: @agent-technical-writer (mode: post-implementation) + +**Source**: `## Invisible Knowledge` section of this plan + +**Files**: +- `src/setup/adapters/CLAUDE.md` (update) +- `src/setup/adapters/README.md` (new or update) +- `src/dap/CLAUDE.md` (update if exists) + +**Requirements**: +Delegate to Technical Writer. Key deliverables: +- Update adapters CLAUDE.md with js-debug entry in table +- Document TcpSpawnStyle in DAP module +- README.md with js-debug architecture and sourcemap handling + +**Acceptance Criteria**: +- CLAUDE.md is tabular index only +- README.md captures spawn_style decision and js-debug specifics +- Documentation matches implementation + +## Milestone Dependencies + +``` +M1 (Core Infrastructure) + | + +---> M2 (Installer) ---> M3 (Session) ---> M5 (Tests) + | | + +---> M4 (Fixtures) --------------------------+ + | + v + M6 (Documentation) +``` + +**Parallel opportunities**: +- M2 (Installer) and M4 (Fixtures) can run in parallel after M1 +- M5 (Tests) requires M2, M3, and M4 complete +- M6 runs after all implementation milestones diff --git a/scripts/run-e2e-tests.sh b/scripts/run-e2e-tests.sh new file mode 100755 index 0000000..d6b8a88 --- /dev/null +++ b/scripts/run-e2e-tests.sh @@ -0,0 +1,73 @@ +#!/bin/bash +# Run E2E tests locally with Docker +# Usage: +# ./scripts/run-e2e-tests.sh - Run all tests +# ./scripts/run-e2e-tests.sh lldb - Run only LLDB tests +# ./scripts/run-e2e-tests.sh delve - Run only Delve tests +# ./scripts/run-e2e-tests.sh debugpy - Run only debugpy tests +# ./scripts/run-e2e-tests.sh js-debug - Run only js-debug tests +# ./scripts/run-e2e-tests.sh gdb - Run only GDB tests + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(dirname "$SCRIPT_DIR")" + +cd "$PROJECT_DIR" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +echo -e "${YELLOW}=== Building debugger CLI ===${NC}" +cargo build --release + +# Build base image first +echo -e "${YELLOW}=== Building base Docker image ===${NC}" +docker build -t debugger-cli:base -f docker/base/Dockerfile . + +# Function to run tests for a specific adapter +run_tests() { + local adapter="$1" + echo -e "${YELLOW}=== Building ${adapter} Docker image ===${NC}" + docker build -t "debugger-cli:${adapter}" -f "docker/${adapter}/Dockerfile" . + + echo -e "${YELLOW}=== Running ${adapter} tests ===${NC}" + if docker run --rm "debugger-cli:${adapter}"; then + echo -e "${GREEN}=== ${adapter} tests PASSED ===${NC}" + return 0 + else + echo -e "${RED}=== ${adapter} tests FAILED ===${NC}" + return 1 + fi +} + +# If specific adapter is requested, run only those tests +if [ -n "$1" ]; then + run_tests "$1" + exit $? +fi + +# Run all tests +FAILED=0 +TOTAL=0 + +for adapter in lldb delve debugpy js-debug gdb; do + # NOTE: avoid ((TOTAL++)) here - with `set -e`, a post-increment that + # evaluates to 0 returns a nonzero status and aborts the script + TOTAL=$((TOTAL + 1)) + if !
run_tests "$adapter"; then + ((FAILED++)) + fi +done + +echo "" +echo "=== Summary ===" +PASSED=$((TOTAL - FAILED)) +echo -e "Passed: ${GREEN}${PASSED}${NC}/${TOTAL}" +if [ $FAILED -gt 0 ]; then + echo -e "Failed: ${RED}${FAILED}${NC}/${TOTAL}" + exit 1 +fi + +echo -e "${GREEN}All tests passed!${NC}" diff --git a/scripts/run-local-e2e.sh b/scripts/run-local-e2e.sh new file mode 100755 index 0000000..05c18a8 --- /dev/null +++ b/scripts/run-local-e2e.sh @@ -0,0 +1,211 @@ +#!/bin/bash +# Run E2E tests locally without Docker +# Requires: the debug adapters to be installed on your system +# +# Usage: +# ./scripts/run-local-e2e.sh - Run all available tests +# ./scripts/run-local-e2e.sh lldb - Run only LLDB tests +# ./scripts/run-local-e2e.sh delve - Run only Delve tests +# ./scripts/run-local-e2e.sh debugpy - Run only debugpy tests +# ./scripts/run-local-e2e.sh js-debug - Run only js-debug tests + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_DIR="$(dirname "$SCRIPT_DIR")" + +cd "$PROJECT_DIR" + +# Colors +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' + +# Build the debugger +echo -e "${YELLOW}=== Building debugger CLI ===${NC}" +cargo build --release +DEBUGGER="./target/release/debugger" + +# Check which adapters are available +check_adapter() { + local name="$1" + case "$name" in + lldb) + which lldb-dap >/dev/null 2>&1 || which lldb-vscode >/dev/null 2>&1 + ;; + delve) + which dlv >/dev/null 2>&1 + ;; + debugpy) + python3 -c "import debugpy" 2>/dev/null + ;; + js-debug) + # Check if js-debug is installed via our setup + [ -d ~/.local/share/debugger/adapters/js-debug ] || \ + $DEBUGGER setup js-debug --check >/dev/null 2>&1 + ;; + gdb) + which gdb >/dev/null 2>&1 + ;; + esac +} + +# Compile test fixtures +compile_fixtures() { + echo -e "${BLUE}=== Compiling test fixtures ===${NC}" + + # C + if which gcc >/dev/null 2>&1; then + gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c 2>/dev/null || true + gcc -g tests/e2e/hello_world.c -o tests/e2e/test_c 2>/dev/null || true + fi + + # Rust + if which rustc >/dev/null 2>&1; then + rustc -g tests/fixtures/simple.rs -o tests/fixtures/test_simple_rs 2>/dev/null || true + rustc -g tests/e2e/hello_world.rs -o tests/e2e/test_rs 2>/dev/null || true + fi + + # Go + if which go >/dev/null 2>&1; then + go build -gcflags='all=-N -l' -o tests/e2e/test_go tests/e2e/hello_world.go 2>/dev/null || true + go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go 2>/dev/null || true + fi + + # TypeScript + if which npx >/dev/null 2>&1; then + (cd tests/fixtures && npm install 2>/dev/null && npx tsc 2>/dev/null) || true + fi +} + +# Run tests for an adapter +run_adapter_tests() { + local adapter="$1" + local passed=0 + local failed=0 + + echo -e "${YELLOW}=== Running $adapter tests ===${NC}" + + case "$adapter" in + lldb) + for scenario in hello_world_c hello_world_rust complex_verification; do + if [ -f "tests/scenarios/${scenario}.yml" ]; then + echo -e "${BLUE} Running ${scenario}...${NC}" + if $DEBUGGER test "tests/scenarios/${scenario}.yml" --verbose; then + ((passed++)) + echo -e "${GREEN} ✓ ${scenario}${NC}" + else + ((failed++)) + echo -e "${RED} ✗ ${scenario}${NC}" + fi + fi + done + ;; + delve) + for scenario in hello_world_go complex_go; do + if [ -f "tests/scenarios/${scenario}.yml" ]; then + echo -e "${BLUE} Running ${scenario}...${NC}" + if $DEBUGGER test "tests/scenarios/${scenario}.yml" --verbose; then + ((passed++)) + echo -e 
"${GREEN} ✓ ${scenario}${NC}" + else + ((failed++)) + echo -e "${RED} ✗ ${scenario}${NC}" + fi + fi + done + ;; + debugpy) + for scenario in hello_world_python; do + if [ -f "tests/scenarios/${scenario}.yml" ]; then + echo -e "${BLUE} Running ${scenario}...${NC}" + if $DEBUGGER test "tests/scenarios/${scenario}.yml" --verbose; then + ((passed++)) + echo -e "${GREEN} ✓ ${scenario}${NC}" + else + ((failed++)) + echo -e "${RED} ✗ ${scenario}${NC}" + fi + fi + done + ;; + js-debug) + # Setup js-debug if not installed + $DEBUGGER setup js-debug 2>/dev/null || true + + for scenario in hello_world_js hello_world_ts stepping_js expression_eval_js; do + if [ -f "tests/scenarios/${scenario}.yml" ]; then + echo -e "${BLUE} Running ${scenario}...${NC}" + if $DEBUGGER test "tests/scenarios/${scenario}.yml" --verbose; then + ((passed++)) + echo -e "${GREEN} ✓ ${scenario}${NC}" + else + ((failed++)) + echo -e "${RED} ✗ ${scenario}${NC}" + fi + fi + done + ;; + gdb) + echo -e "${BLUE} Running hello_world_c with GDB...${NC}" + if $DEBUGGER test tests/scenarios/hello_world_c.yml --adapter gdb --verbose; then + ((passed++)) + echo -e "${GREEN} ✓ hello_world_c (gdb)${NC}" + else + ((failed++)) + echo -e "${RED} ✗ hello_world_c (gdb)${NC}" + fi + ;; + esac + + # Cleanup daemon + pkill -f "debugger daemon" 2>/dev/null || true + + echo -e " ${adapter}: ${GREEN}${passed} passed${NC}, ${RED}${failed} failed${NC}" + return $failed +} + +# Main +compile_fixtures + +if [ -n "$1" ]; then + # Run specific adapter + if check_adapter "$1"; then + run_adapter_tests "$1" + exit $? + else + echo -e "${RED}Adapter $1 is not available on this system${NC}" + exit 1 + fi +fi + +# Run all available adapters +TOTAL_PASSED=0 +TOTAL_FAILED=0 +ADAPTERS_RUN=0 + +for adapter in lldb delve debugpy js-debug gdb; do + if check_adapter "$adapter"; then + ((ADAPTERS_RUN++)) + if run_adapter_tests "$adapter"; then + ((TOTAL_PASSED++)) + else + ((TOTAL_FAILED++)) + fi + echo "" + else + echo -e "${YELLOW}Skipping $adapter (not available)${NC}" + fi +done + +echo "" +echo "=== Summary ===" +echo -e "Adapters tested: ${ADAPTERS_RUN}" +echo -e "Passed: ${GREEN}${TOTAL_PASSED}${NC}" +echo -e "Failed: ${RED}${TOTAL_FAILED}${NC}" + +if [ $TOTAL_FAILED -gt 0 ]; then + exit 1 +fi diff --git a/src/common/config.rs b/src/common/config.rs index c5db5da..f632b61 100644 --- a/src/common/config.rs +++ b/src/common/config.rs @@ -42,6 +42,18 @@ pub enum TransportMode { Tcp, } +/// TCP adapter spawn style +#[derive(Debug, Deserialize, Clone, Default, PartialEq)] +pub enum TcpSpawnStyle { + /// Adapter accepts --listen flag and waits for connection (Delve) + #[default] + #[serde(rename = "tcp-listen")] + TcpListen, + /// Adapter receives port as positional argument (js-debug) + #[serde(rename = "tcp-port-arg")] + TcpPortArg, +} + /// Configuration for a debug adapter #[derive(Debug, Deserialize, Clone)] pub struct AdapterConfig { @@ -55,6 +67,10 @@ pub struct AdapterConfig { /// Transport mode for DAP communication #[serde(default)] pub transport: TransportMode, + + /// TCP spawn style (only used when transport is Tcp) + #[serde(default)] + pub spawn_style: TcpSpawnStyle, } /// Default settings @@ -195,6 +211,7 @@ impl Config { path, args: Vec::new(), transport: TransportMode::default(), + spawn_style: TcpSpawnStyle::default(), }) } } diff --git a/src/daemon/session.rs b/src/daemon/session.rs index 8a1bb06..ecbe99f 100644 --- a/src/daemon/session.rs +++ b/src/daemon/session.rs @@ -156,7 +156,7 @@ impl DebugSession { DapClient::spawn(&adapter_config.path, 
&adapter_config.args).await? } TransportMode::Tcp => { - DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args).await? + DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args, &adapter_config.spawn_style).await? } }; @@ -176,12 +176,17 @@ impl DebugSession { .map(|p| p.to_string_lossy().into_owned()); // Build launch arguments - adapter-specific fields - let is_python = adapter_name == "debugpy" - || program.extension().map(|e| e == "py").unwrap_or(false); + // Only set adapter-specific fields when actually using that adapter + let is_python = adapter_name == "debugpy"; let is_go = adapter_name == "go" || adapter_name == "delve" || adapter_name == "dlv"; - + let is_js_debug = adapter_name == "js-debug"; + // Enable source maps for js-debug when debugging TS files or compiled JS with sibling .ts + let is_typescript_source = program.extension().map(|e| e == "ts").unwrap_or(false) + || (program.extension().map(|e| e == "js").unwrap_or(false) + && program.with_extension("ts").exists()); + let launch_args = LaunchArguments { program: program.to_string_lossy().into_owned(), args: args.clone(), @@ -202,6 +207,13 @@ impl DebugSession { stop_at_entry: if is_go && stop_on_entry { Some(true) } else { None }, // GDB-based adapters (gdb, cuda-gdb) use stopAtBeginningOfMainSubprogram stop_at_beginning_of_main_subprogram: if (adapter_name == "gdb" || adapter_name == "cuda-gdb") && stop_on_entry { Some(true) } else { None }, + // js-debug specific - type selects the debugger (pwa-node for Node.js) + type_attr: if is_js_debug { Some("pwa-node".to_string()) } else { None }, + source_maps: if is_js_debug && is_typescript_source { Some(true) } else { None }, + out_files: None, + runtime_executable: None, + runtime_args: None, + skip_files: None, }; tracing::debug!( @@ -371,7 +383,7 @@ impl DebugSession { DapClient::spawn(&adapter_config.path, &adapter_config.args).await? } TransportMode::Tcp => { - DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args).await? + DapClient::spawn_tcp(&adapter_config.path, &adapter_config.args, &adapter_config.spawn_style).await? } }; diff --git a/src/dap/client.rs b/src/dap/client.rs index 6795a6f..75449b1 100644 --- a/src/dap/client.rs +++ b/src/dap/client.rs @@ -149,90 +149,141 @@ impl DapClient { }) } - /// Spawn a new DAP adapter that uses TCP for communication (e.g., Delve) - /// - /// This spawns the adapter with a --listen flag, waits for it to output - /// the port it's listening on, then connects via TCP. 
- pub async fn spawn_tcp(adapter_path: &Path, args: &[String]) -> Result { + /// Spawn a new DAP adapter that uses TCP for communication (e.g., Delve, js-debug) + pub async fn spawn_tcp( + adapter_path: &Path, + args: &[String], + spawn_style: &crate::common::config::TcpSpawnStyle, + ) -> Result { use crate::common::parse_listen_address; use tokio::io::{AsyncBufReadExt, BufReader as TokioBufReader}; - // Build command with --listen=127.0.0.1:0 to get a random available port - let mut cmd = Command::new(adapter_path); - cmd.args(args) - .arg("--listen=127.0.0.1:0") - .stdin(Stdio::null()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); + let (mut adapter, addr) = match spawn_style { + crate::common::config::TcpSpawnStyle::TcpListen => { + let mut cmd = Command::new(adapter_path); + cmd.args(args) + .arg("--listen=127.0.0.1:0") + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let mut adapter = cmd.spawn().map_err(|e| { + Error::AdapterStartFailed(format!( + "Failed to start {}: {}", + adapter_path.display(), + e + )) + })?; - let mut adapter = cmd.spawn().map_err(|e| { - Error::AdapterStartFailed(format!( - "Failed to start {}: {}", - adapter_path.display(), - e - )) - })?; + let stdout = adapter.stdout.take().ok_or_else(|| { + let _ = adapter.start_kill(); + Error::AdapterStartFailed("Failed to get adapter stdout".to_string()) + })?; - // Read stdout to find the listening address - // Delve outputs: "DAP server listening at: 127.0.0.1:PORT" - let stdout = adapter.stdout.take().ok_or_else(|| { - let _ = adapter.start_kill(); - Error::AdapterStartFailed("Failed to get adapter stdout".to_string()) - })?; + let mut stdout_reader = TokioBufReader::new(stdout); + let mut line = String::new(); - let mut stdout_reader = TokioBufReader::new(stdout); - let mut line = String::new(); + let addr_result = tokio::time::timeout(Duration::from_secs(10), async { + loop { + line.clear(); + let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| { + Error::AdapterStartFailed(format!("Failed to read adapter output: {}", e)) + })?; - // Wait for the "listening at" message with timeout - let addr_result = tokio::time::timeout(Duration::from_secs(10), async { - loop { - line.clear(); - let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| { - Error::AdapterStartFailed(format!("Failed to read adapter output: {}", e)) - })?; + if bytes_read == 0 { + return Err(Error::AdapterStartFailed( + "Adapter exited before outputting listen address".to_string(), + )); + } - if bytes_read == 0 { - return Err(Error::AdapterStartFailed( - "Adapter exited before outputting listen address".to_string(), - )); - } + tracing::debug!("Adapter output: {}", line.trim()); - tracing::debug!("Delve output: {}", line.trim()); + if let Some(addr) = parse_listen_address(&line) { + return Ok(addr); + } + } + }) + .await; + + let addr = match addr_result { + Ok(Ok(addr)) => addr, + Ok(Err(e)) => { + let _ = adapter.start_kill(); + return Err(e); + } + Err(_) => { + let _ = adapter.start_kill(); + return Err(Error::AdapterStartFailed( + "Timeout waiting for adapter to start listening".to_string(), + )); + } + }; - // Look for the listening address in the output - if let Some(addr) = parse_listen_address(&line) { - return Ok(addr); - } + (adapter, addr) } - }) - .await; - - // Handle timeout or error - cleanup adapter before returning - let addr = match addr_result { - Ok(Ok(addr)) => addr, - Ok(Err(e)) => { - let _ = adapter.start_kill(); - return Err(e); - } - Err(_) => { - let _ 
= adapter.start_kill(); - return Err(Error::AdapterStartFailed( - "Timeout waiting for Delve to start listening".to_string(), - )); + crate::common::config::TcpSpawnStyle::TcpPortArg => { + use std::net::TcpListener as StdTcpListener; + + let listener = StdTcpListener::bind("127.0.0.1:0").map_err(|e| { + Error::AdapterStartFailed(format!("Failed to allocate port: {}", e)) + })?; + let port = listener.local_addr().map_err(|e| { + Error::AdapterStartFailed(format!("Failed to get port: {}", e)) + })?.port(); + // Race window: the port is released before the adapter binds it. We spawn + // immediately, and the connect loop below retries with backoff, so a port + // stolen by the OS in the meantime surfaces as a connection failure. + drop(listener); + + let addr = format!("127.0.0.1:{}", port); + + let mut cmd = Command::new(adapter_path); + let mut full_args = args.to_vec(); + full_args.push(port.to_string()); + + cmd.args(&full_args) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let adapter = cmd.spawn().map_err(|e| { + Error::AdapterStartFailed(format!( + "Failed to start {}: {}", + adapter_path.display(), + e + )) + })?; + + (adapter, addr) + } }; - tracing::info!("Connecting to Delve DAP server at {}", addr); - - // Connect to the TCP port - cleanup adapter on failure - let stream = match TcpStream::connect(&addr).await { - Ok(s) => s, - Err(e) => { - let _ = adapter.start_kill(); - return Err(Error::AdapterStartFailed(format!( - "Failed to connect to Delve at {}: {}", - addr, e - ))); + tracing::info!("Connecting to DAP adapter at {}", addr); + + // Retry TCP connection with exponential backoff + // Handles adapters that need time to start listening (e.g., js-debug) + let stream = { + let mut last_error = None; + let mut delay = Duration::from_millis(100); + let max_delay = Duration::from_millis(1000); + let timeout_duration = Duration::from_secs(10); + let start = std::time::Instant::now(); + + loop { + match TcpStream::connect(&addr).await { + Ok(s) => break s, + Err(e) => { + last_error = Some(e); + if start.elapsed() >= timeout_duration { + let _ = adapter.start_kill(); + return Err(Error::AdapterStartFailed(format!( + "Failed to connect to adapter at {} after {:?}: {}", + addr, timeout_duration, last_error.unwrap() + ))); + } + tokio::time::sleep(delay).await; + delay = std::cmp::min(delay * 2, max_delay); + } + } } }; diff --git a/src/dap/types.rs index fef18ad..dc922aa 100644 --- a/src/dap/types.rs +++ b/src/dap/types.rs @@ -158,6 +158,26 @@ pub struct LaunchArguments { /// Stop at beginning of main (GDB uses stopAtBeginningOfMainSubprogram instead of stopOnEntry) #[serde(skip_serializing_if = "Option::is_none")] pub stop_at_beginning_of_main_subprogram: Option<bool>, + + // === js-debug (JavaScript/TypeScript) specific === + /// Debugger type for js-debug (e.g., "pwa-node" for Node.js, "pwa-chrome" for Chrome) + #[serde(rename = "type", skip_serializing_if = "Option::is_none")] + pub type_attr: Option<String>, + /// Enable source maps for TypeScript debugging + #[serde(skip_serializing_if = "Option::is_none")] + pub source_maps: Option<bool>, + /// Glob patterns for output files (compiled JavaScript) + #[serde(skip_serializing_if = "Option::is_none")] + pub out_files: Option<Vec<String>>, + /// Node.js runtime executable path + #[serde(skip_serializing_if = "Option::is_none")] + pub runtime_executable: Option<String>, + /// Additional runtime arguments + #[serde(skip_serializing_if = "Option::is_none")] + pub runtime_args: Option<Vec<String>>, + /// Patterns for files to skip during debugging + #[serde(skip_serializing_if = "Option::is_none")] + pub skip_files: Option<Vec<String>>, } /// Attach request arguments
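As a sanity check on the wire format, the sketch below shows what these fields should serialize to, assuming `LaunchArguments` derives `Serialize` and `Default` and carries a struct-level `#[serde(rename_all = "camelCase")]` like the other DAP types (only `type` needs the explicit rename because it is a Rust keyword):

```rust
#[cfg(test)]
mod launch_args_js_debug {
    use super::*;

    #[test]
    fn ts_launch_serializes_to_dap_names() {
        let args = LaunchArguments {
            program: "tests/fixtures/simple.ts".to_string(),
            type_attr: Some("pwa-node".to_string()),
            source_maps: Some(true),
            ..Default::default()
        };
        let json = serde_json::to_value(&args).unwrap();
        // Explicit rename covers the keyword, rename_all covers the rest
        assert_eq!(json["type"], "pwa-node");
        assert_eq!(json["sourceMaps"], true);
        // skip_serializing_if keeps unset js-debug fields out of the request
        assert!(json.get("outFiles").is_none());
    }
}
```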
"Option::is_none")] + pub skip_files: Option>, } /// Attach request arguments diff --git a/src/setup/adapters/delve.rs b/src/setup/adapters/delve.rs index 9c7caeb..723450b 100644 --- a/src/setup/adapters/delve.rs +++ b/src/setup/adapters/delve.rs @@ -119,7 +119,7 @@ impl Installer for DelveInstaller { match status { InstallStatus::Installed { path, .. } => { // Delve uses TCP-based DAP mode with 'dap' subcommand - verify_dap_adapter_tcp(&path, &["dap".to_string()]).await + verify_dap_adapter_tcp(&path, &["dap".to_string()], crate::common::config::TcpSpawnStyle::TcpListen).await } InstallStatus::Broken { reason, .. } => Ok(VerifyResult { success: false, diff --git a/src/setup/adapters/js_debug.rs b/src/setup/adapters/js_debug.rs new file mode 100644 index 0000000..ab6f60e --- /dev/null +++ b/src/setup/adapters/js_debug.rs @@ -0,0 +1,176 @@ +//! js-debug installer +//! +//! Installs Microsoft's JavaScript/TypeScript debugger via npm. + +use crate::common::{Error, Result}; +use crate::setup::installer::{ + adapters_dir, ensure_adapters_dir, run_command_args, write_version_file, + InstallMethod, InstallOptions, InstallResult, InstallStatus, Installer, +}; +use crate::setup::registry::{DebuggerInfo, Platform}; +use crate::setup::verifier::{verify_dap_adapter_tcp, VerifyResult}; +use async_trait::async_trait; +use std::path::PathBuf; + +static INFO: DebuggerInfo = DebuggerInfo { + id: "js-debug", + name: "js-debug", + languages: &["javascript", "typescript"], + platforms: &[Platform::Linux, Platform::MacOS, Platform::Windows], + description: "Microsoft's JavaScript/TypeScript debugger", + primary: true, +}; + +pub struct JsDebugInstaller; + +#[async_trait] +impl Installer for JsDebugInstaller { + fn info(&self) -> &DebuggerInfo { + &INFO + } + + async fn status(&self) -> Result { + let adapter_dir = adapters_dir().join("js-debug"); + let dap_path = get_dap_executable(&adapter_dir); + + if dap_path.exists() { + let version = read_package_version(&adapter_dir); + return Ok(InstallStatus::Installed { + path: dap_path, + version, + }); + } + + Ok(InstallStatus::NotInstalled) + } + + async fn best_method(&self) -> Result { + if which::which("npm").is_err() { + return Err(Error::Internal( + "npm not found. Please install Node.js and npm first.".to_string(), + )); + } + + Ok(InstallMethod::LanguagePackage { + tool: "npm".to_string(), + package: "@vscode/js-debug".to_string(), + }) + } + + async fn install(&self, opts: InstallOptions) -> Result { + install_js_debug(&opts).await + } + + async fn uninstall(&self) -> Result<()> { + let adapter_dir = adapters_dir().join("js-debug"); + if adapter_dir.exists() { + std::fs::remove_dir_all(&adapter_dir)?; + println!("Removed {}", adapter_dir.display()); + } else { + println!("js-debug managed installation not found"); + } + Ok(()) + } + + async fn verify(&self) -> Result { + let status = self.status().await?; + + match status { + InstallStatus::Installed { path, .. } => { + // js-debug's dapDebugServer.js must be run via node + let node_path = which::which("node").map_err(|_| { + Error::Internal("node not found in PATH".to_string()) + })?; + // TcpPortArg appends port as positional argument, no extra args needed + verify_dap_adapter_tcp(&node_path, &[path.to_string_lossy().to_string()], crate::common::config::TcpSpawnStyle::TcpPortArg).await + } + InstallStatus::Broken { reason, .. 
} => Ok(VerifyResult { + success: false, + capabilities: None, + error: Some(reason), + }), + InstallStatus::NotInstalled => Ok(VerifyResult { + success: false, + capabilities: None, + error: Some("Not installed".to_string()), + }), + } + } +} + +fn get_dap_executable(adapter_dir: &PathBuf) -> PathBuf { + // @vscode/js-debug installs to node_modules/@vscode/js-debug + let js_path = adapter_dir.join("node_modules/@vscode/js-debug/src/dapDebugServer.js"); + if js_path.exists() { + return js_path; + } + adapter_dir.join("node_modules/@vscode/js-debug/dist/src/dapDebugServer.js") +} + +fn read_package_version(adapter_dir: &PathBuf) -> Option<String> { + let package_json = adapter_dir.join("node_modules/@vscode/js-debug/package.json"); + if !package_json.exists() { + return None; + } + + let content = std::fs::read_to_string(&package_json).ok()?; + let parsed: serde_json::Value = serde_json::from_str(&content).ok()?; + parsed.get("version") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) +} + +async fn install_js_debug(opts: &InstallOptions) -> Result<InstallResult> { + println!("Checking for existing installation... not found"); + + let npm_path = which::which("npm").map_err(|_| { + Error::Internal("npm not found in PATH".to_string()) + })?; + let node_path = which::which("node").map_err(|_| { + Error::Internal("node not found in PATH".to_string()) + })?; + println!("Using npm: {}", npm_path.display()); + + let adapter_dir = ensure_adapters_dir()?.join("js-debug"); + + if opts.force && adapter_dir.exists() { + std::fs::remove_dir_all(&adapter_dir)?; + } + + std::fs::create_dir_all(&adapter_dir)?; + + let package = if let Some(version) = &opts.version { + format!("@vscode/js-debug@{}", version) + } else { + "@vscode/js-debug".to_string() + }; + + println!("Installing {}...", package); + run_command_args( + &npm_path, + &["install", "--prefix", adapter_dir.to_str().unwrap_or("."), &package] + ).await?; + + let dap_path = get_dap_executable(&adapter_dir); + if !dap_path.exists() { + return Err(Error::Internal( + "@vscode/js-debug installation succeeded but dapDebugServer.js not found".to_string(), + )); + } + + let version = read_package_version(&adapter_dir); + + if let Some(v) = &version { + write_version_file(&adapter_dir, v)?; + } + + println!("js-debug installation completed."); + + // Return node as the executable with the JS file as an argument + // TcpPortArg will append the port as a positional argument + Ok(InstallResult { + path: node_path, + version, + args: vec![dap_path.to_string_lossy().to_string()], + }) +} diff --git a/src/setup/adapters/mod.rs b/src/setup/adapters/mod.rs index f3e1995..cea2256 100644 --- a/src/setup/adapters/mod.rs +++ b/src/setup/adapters/mod.rs @@ -8,4 +8,5 @@ pub mod debugpy; pub mod delve; pub mod gdb_common; pub mod gdb; +pub mod js_debug; pub mod lldb; diff --git a/src/setup/detector.rs b/src/setup/detector.rs index 47917ec..c8a45fc 100644 --- a/src/setup/detector.rs +++ b/src/setup/detector.rs @@ -97,7 +97,7 @@ pub fn debuggers_for_project(project: &ProjectType) -> Vec<&'static str> { ProjectType::Cuda => vec!["cuda-gdb"], ProjectType::Go => vec!["go"], ProjectType::Python => vec!["python"], - ProjectType::JavaScript | ProjectType::TypeScript => vec![], // js-debug not yet implemented + ProjectType::JavaScript | ProjectType::TypeScript => vec!["js-debug"], ProjectType::C | ProjectType::Cpp => vec!["lldb", "codelldb"], ProjectType::CSharp => vec![], // netcoredbg not yet implemented ProjectType::Java => vec![], // java-debug not yet implemented diff --git 
a/src/setup/registry.rs b/src/setup/registry.rs index 6323925..95b41c3 100644 --- a/src/setup/registry.rs +++ b/src/setup/registry.rs @@ -108,6 +108,14 @@ static DEBUGGERS: &[DebuggerInfo] = &[ description: "Go debugger with DAP support", primary: true, }, + DebuggerInfo { + id: "js-debug", + name: "js-debug", + languages: &["javascript", "typescript"], + platforms: &[Platform::Linux, Platform::MacOS, Platform::Windows], + description: "Microsoft's JavaScript/TypeScript debugger", + primary: true, + }, ]; /// Get all registered debuggers @@ -146,6 +154,7 @@ pub fn get_installer(id: &str) -> Option<Arc<dyn Installer>> { "codelldb" => Some(Arc::new(adapters::codelldb::CodeLldbInstaller)), "python" => Some(Arc::new(adapters::debugpy::DebugpyInstaller)), "go" => Some(Arc::new(adapters::delve::DelveInstaller)), + "js-debug" => Some(Arc::new(adapters::js_debug::JsDebugInstaller)), _ => None, } } diff --git a/src/setup/verifier.rs b/src/setup/verifier.rs index 087487f..d20b310 100644 --- a/src/setup/verifier.rs +++ b/src/setup/verifier.rs @@ -2,6 +2,7 @@ //! //! Verifies that installed debuggers work correctly by sending DAP messages. +use crate::common::config::TcpSpawnStyle; use crate::common::{parse_listen_address, Error, Result}; use std::path::Path; use std::process::Stdio; @@ -63,91 +64,134 @@ pub async fn verify_dap_adapter( } } -/// Verify a TCP-based DAP adapter (like Delve) by spawning it with --listen -/// and connecting via TCP to send the initialize request +/// Verify a TCP-based DAP adapter by spawning it and connecting via TCP pub async fn verify_dap_adapter_tcp( path: &Path, args: &[String], + spawn_style: TcpSpawnStyle, ) -> Result<VerifyResult> { - // Spawn the adapter with --listen=127.0.0.1:0 - let mut cmd = Command::new(path); - cmd.args(args) - .arg("--listen=127.0.0.1:0") - .stdin(Stdio::null()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - - let mut child = cmd.spawn().map_err(|e| { - Error::Internal(format!("Failed to spawn adapter: {}", e)) - })?; + let (mut child, addr) = match spawn_style { + TcpSpawnStyle::TcpListen => { + let mut cmd = Command::new(path); + cmd.args(args) + .arg("--listen=127.0.0.1:0") + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let mut child = cmd.spawn().map_err(|e| { + Error::Internal(format!("Failed to spawn adapter: {}", e)) + })?; - // Read stdout to find the listening address - let stdout = child.stdout.take().ok_or_else(|| { - Error::Internal("Failed to get adapter stdout".to_string()) - })?; + let stdout = child.stdout.take().ok_or_else(|| { + Error::Internal("Failed to get adapter stdout".to_string()) + })?; - let mut stdout_reader = BufReader::new(stdout); - let mut line = String::new(); + let mut stdout_reader = BufReader::new(stdout); + let mut line = String::new(); + + let listen_result = timeout(Duration::from_secs(10), async { + loop { + line.clear(); + let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| { + Error::Internal(format!("Failed to read adapter output: {}", e)) + })?; + + if bytes_read == 0 { + return Err(Error::Internal( + "Adapter exited before outputting listen address".to_string(), + )); + } + + if let Some(addr) = parse_listen_address(&line) { + return Ok(addr); + } + } + }) + .await; + + let addr = match listen_result { + Ok(Ok(addr)) => addr, + Ok(Err(e)) => { + let _ = child.kill().await; + return Ok(VerifyResult { + success: false, + capabilities: None, + error: Some(e.to_string()), + }); + } + Err(_) => { + let _ = child.kill().await; + return Ok(VerifyResult { + success: false, + 
capabilities: None, + error: Some("Timeout waiting for adapter to start listening".to_string()), + }); + } + }; + + (child, addr) + } + TcpSpawnStyle::TcpPortArg => { + use std::net::TcpListener as StdTcpListener; - - // Wait for the "listening at" message with timeout - let listen_result = timeout(Duration::from_secs(10), async { - loop { - line.clear(); - let bytes_read = stdout_reader.read_line(&mut line).await.map_err(|e| { - Error::Internal(format!("Failed to read adapter output: {}", e)) + let listener = StdTcpListener::bind("127.0.0.1:0").map_err(|e| { + Error::Internal(format!("Failed to allocate port: {}", e)) })?; + let port = listener.local_addr().map_err(|e| { + Error::Internal(format!("Failed to get port: {}", e)) + })?.port(); + drop(listener); - if bytes_read == 0 { - return Err(Error::Internal( - "Adapter exited before outputting listen address".to_string(), - )); - } + let addr = format!("127.0.0.1:{}", port); - // Look for the listening address in the output - if let Some(addr) = parse_listen_address(&line) { - return Ok(addr); - } - } - }) - .await; - - let addr = match listen_result { - Ok(Ok(addr)) => addr, - Ok(Err(e)) => { - let _ = child.kill().await; - return Ok(VerifyResult { - success: false, - capabilities: None, - error: Some(e.to_string()), - }); - } - Err(_) => { - let _ = child.kill().await; - return Ok(VerifyResult { - success: false, - capabilities: None, - error: Some("Timeout waiting for adapter to start listening".to_string()), - }); + let mut cmd = Command::new(path); + let mut full_args = args.to_vec(); + full_args.push(port.to_string()); + + cmd.args(&full_args) + .stdin(Stdio::null()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + let child = cmd.spawn().map_err(|e| { + Error::Internal(format!("Failed to spawn adapter: {}", e)) + })?; + + (child, addr) + } }; - // Connect to the TCP port - let stream = match TcpStream::connect(&addr).await { - Ok(s) => s, - Err(e) => { - let _ = child.kill().await; - return Ok(VerifyResult { - success: false, - capabilities: None, - error: Some(format!("Failed to connect to {}: {}", addr, e)), - }); + // Retry TCP connection with exponential backoff + let stream = { + let mut last_error = String::new(); + let mut delay = Duration::from_millis(100); + let max_delay = Duration::from_millis(1000); + let timeout_duration = Duration::from_secs(10); + let start = std::time::Instant::now(); + + loop { + match TcpStream::connect(&addr).await { + Ok(s) => break s, + Err(e) => { + last_error = e.to_string(); + if start.elapsed() >= timeout_duration { + let _ = child.kill().await; + return Ok(VerifyResult { + success: false, + capabilities: None, + error: Some(format!("Failed to connect to {} after {:?}: {}", addr, timeout_duration, last_error)), + }); + } + tokio::time::sleep(delay).await; + delay = std::cmp::min(delay * 2, max_delay); + } + } } }; - // Send initialize request and wait for response let init_result = timeout(Duration::from_secs(5), send_initialize_tcp(stream)).await; - // Cleanup let _ = child.kill().await; match init_result { diff --git a/src/testing/config.rs b/src/testing/config.rs index 4cd1b2a..adf3dcf 100644 --- a/src/testing/config.rs +++ b/src/testing/config.rs @@ -34,6 +34,13 @@ pub struct TargetConfig { pub program: PathBuf, /// Arguments to pass to the program pub args: Option<Vec<String>>, + /// Debug mode: "launch" (default) or "attach" + #[serde(default = "default_mode")] + pub mode: String, + /// PID to attach to (for attach mode) + pub pid: Option<u32>, + /// Path to file containing PID (for attach mode with setup-generated PIDs) + pub pid_file: Option<PathBuf>, /// Debug adapter to use (e.g., "lldb-dap", "codelldb", "debugpy") pub adapter: Option<String>, /// Whether to stop at the program entry point @@ -41,6 +48,10 @@ pub stop_on_entry: bool, } +fn default_mode() -> String { + "launch".to_string() +} + /// A single test step in the execution flow #[derive(Deserialize, Debug)] #[serde(tag = "action", rename_all = "snake_case")] @@ -139,6 +150,9 @@ pub struct FrameAssertion { /// Expectations for an evaluate result #[derive(Deserialize, Debug)] pub struct EvaluateExpectation { + /// Whether the evaluation should succeed (default: true) + /// Set to false to test error scenarios (undefined variables, syntax errors) + pub success: Option<bool>, /// Expected result value pub result: Option<String>, /// Expected result substring
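For reference, a scenario exercising the new attach fields might be parsed as in the sketch below; it assumes scenarios are deserialized with `serde_yaml`, and spells out `stop_on_entry` explicitly since whether that field carries a `#[serde(default)]` is not shown above:

```rust
#[cfg(test)]
mod target_config_attach {
    use super::*;

    #[test]
    fn attach_mode_with_pid_file_parses() {
        let yaml = r#"
program: tests/fixtures/test_simple_c
mode: attach
pid_file: /tmp/debugger-test.pid
stop_on_entry: false
"#;
        let target: TargetConfig = serde_yaml::from_str(yaml).unwrap();
        assert_eq!(target.mode, "attach");
        // pid and pid_file are alternatives; the runner errors if both are absent
        assert!(target.pid.is_none());
        assert_eq!(
            target.pid_file.as_deref(),
            Some(std::path::Path::new("/tmp/debugger-test.pid"))
        );
    }
}
```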
diff --git a/src/testing/runner.rs b/src/testing/runner.rs index f0f756a..cc06942 100644 --- a/src/testing/runner.rs +++ b/src/testing/runner.rs @@ -113,37 +113,107 @@ pub async fn run_scenario(path: &Path, verbose: bool) -> Result { scenario.target.program.clone() }; - let program_path = program_path.canonicalize().map_err(|e| { - Error::Config(format!( - "Program not found '{}': {}", - scenario.target.program.display(), - e - )) - })?; + // Handle launch vs attach mode + if scenario.target.mode == "attach" { + // Attach mode: get PID from scenario or pid_file + let pid = if let Some(pid) = scenario.target.pid { + pid + } else if let Some(pid_file_path) = &scenario.target.pid_file { + let pid_file = if pid_file_path.is_relative() { + scenario_dir.join(pid_file_path) + } else { + pid_file_path.clone() + }; - - // Start the debug session - println!("\n{}", "Starting debug session...".cyan()); - client - .send_command(Command::Start { - program: program_path.clone(), - args: scenario.target.args.clone().unwrap_or_default(), - adapter: scenario.target.adapter.clone(), - stop_on_entry: scenario.target.stop_on_entry, - initial_breakpoints: Vec::new(), - }) - .await?; + let pid_str = std::fs::read_to_string(&pid_file).map_err(|e| { + Error::Config(format!( + "Failed to read PID file '{}': {}", + pid_file.display(), + e + )) + })?; + + pid_str.trim().parse::<u32>().map_err(|e| { + Error::Config(format!( + "Invalid PID in file '{}': {}", + pid_file.display(), + e + )) + })? + } else { + return Err(Error::Config( + "Attach mode requires either 'pid' or 'pid_file' field".to_string(), + )); + }; + + // Validate process exists before attempting attach (signal 0 checks existence) + #[cfg(unix)] + { + // Signal 0 tests process existence without side effects + let result = unsafe { libc::kill(pid as i32, 0) }; + if result != 0 { + return Err(Error::Config(format!( + "Process with PID {} not found or not accessible", + pid + ))); + } + } - if verbose { - println!( - " Program: {}", - program_path.display().to_string().dimmed() - ); - if let Some(adapter) = &scenario.target.adapter { - println!(" Adapter: {}", adapter.dimmed()); + println!("\n{}", "Attaching to process...".cyan()); + client + .send_command(Command::Attach { + pid, + adapter: scenario.target.adapter.clone(), + }) + .await?; + + if verbose { + println!(" PID: {}", pid.to_string().dimmed()); + if let Some(adapter) = &scenario.target.adapter { + println!(" Adapter: {}", adapter.dimmed()); + } + } + + println!(" {} Attached to process", "✓".green()); + } else if scenario.target.mode != "launch" { + // Unknown mode - fail explicitly + return Err(Error::Config(format!( + "Unknown target mode '{}'. 
Supported modes: 'launch', 'attach'", + scenario.target.mode + ))); + } else { + // Launch mode (default) + let program_path = program_path.canonicalize().map_err(|e| { + Error::Config(format!( + "Program not found '{}': {}", + scenario.target.program.display(), + e + )) + })?; + + println!("\n{}", "Starting debug session...".cyan()); + client + .send_command(Command::Start { + program: program_path.clone(), + args: scenario.target.args.clone().unwrap_or_default(), + adapter: scenario.target.adapter.clone(), + stop_on_entry: scenario.target.stop_on_entry, + initial_breakpoints: Vec::new(), + }) + .await?; + + if verbose { + println!( + " Program: {}", + program_path.display().to_string().dimmed() + ); + if let Some(adapter) = &scenario.target.adapter { + println!(" Adapter: {}", adapter.dimmed()); + } } - } - println!(" {} Session started", "✓".green()); + println!(" {} Session started", "✓".green()); + } // Execute test steps println!("\n{}", "Steps:".cyan()); @@ -540,8 +610,54 @@ async fn execute_evaluate_step( frame_id: None, context: EvaluateContext::Watch, }) - .await?; + .await; + + // Check if we expect failure + let expect_success = expect.and_then(|e| e.success).unwrap_or(true); + + if !expect_success { + // We expect evaluation to fail + match result { + Err(_) => { + println!( + " {} Step {}: evaluate '{}' (expected failure)", + "✓".green(), + step_num, + expression.dimmed() + ); + return Ok(()); + } + Ok(val) => { + // Check if the result contains an error indicator + let eval_result: EvaluateResult = serde_json::from_value(val) + .map_err(|e| Error::TestAssertion(format!("Failed to parse evaluate result: {}", e)))?; + + // If result_contains is specified, check if error message matches + if let Some(exp) = expect { + if let Some(expected_substr) = &exp.result_contains { + if eval_result.result.to_lowercase().contains(&expected_substr.to_lowercase()) { + println!( + " {} Step {}: evaluate '{}' = {} (expected error)", + "✓".green(), + step_num, + expression.dimmed(), + eval_result.result.dimmed() + ); + return Ok(()); + } + } + } + return Err(Error::TestAssertion(format!( + "Evaluate '{}': expected failure but got result '{}'", + expression, eval_result.result + ))); + } + } + } + + // Normal success path + let result = result?; let eval_result: EvaluateResult = serde_json::from_value(result) .map_err(|e| Error::TestAssertion(format!("Failed to parse evaluate result: {}", e)))?; @@ -610,16 +726,42 @@ fn parse_command(s: &str) -> Result<Command> { )); } // Handle "break add <location>" or just "break <location>" - let location_str = if args[0] == "add" && args.len() > 1 { - args[1..].join(" ") - } else { - args.join(" ") - }; + // Also handle --condition "expr" flag + let mut location_str = String::new(); + let mut condition: Option<String> = None; + let mut i = 0; + + // Skip "add" subcommand if present AND there are more args + // (otherwise "add" is the function name to break on) + if args.get(0) == Some(&"add") && args.len() > 1 { + i = 1; + } + + while i < args.len() { + if args[i] == "--condition" && i + 1 < args.len() { + // Collect condition expression (may be quoted) + i += 1; + let mut cond_parts = Vec::new(); + while i < args.len() && !args[i].starts_with("--") { + cond_parts.push(args[i]); + i += 1; + } + condition = Some(cond_parts.join(" ").trim_matches('"').to_string()); + } else if !args[i].starts_with("--") { + if !location_str.is_empty() { + location_str.push(' '); + } + location_str.push_str(args[i]); + i += 1; + } else { + i += 1; + } + } let location = BreakpointLocation::parse(&location_str)?; 
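+        // Worked example (illustrative): for the input
+        // `break add main.c:42 --condition "count > 10"`, the whitespace-split
+        // args are ["add", "main.c:42", "--condition", "\"count", ">", "10\""].
+        // The loop skips the "add" subcommand, joins the location tokens into
+        // "main.c:42", rejoins the condition tokens and trims the surrounding
+        // quotes, yielding condition == Some("count > 10".to_string()).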
Ok(Command::BreakpointAdd { location, - condition: None, + condition, hit_count: None, }) } @@ -741,6 +883,33 @@ fn parse_command(s: &str) -> Result<Command> { "detach" => Ok(Command::Detach), "restart" => Ok(Command::Restart), + "output" => { + // Parse --tail N and --clear flags + let mut tail: Option<usize> = None; + let mut clear = false; + let mut i = 0; + while i < args.len() { + match args[i] { + "--tail" => { + if i + 1 < args.len() { + tail = args[i + 1].parse().ok(); + i += 2; + } else { + i += 1; + } + } + "--clear" => { + clear = true; + i += 1; + } + _ => { + i += 1; + } + } + } + Ok(Command::GetOutput { tail, clear }) + } + _ => Err(Error::Config(format!("Unknown command: {}", cmd))), } } diff --git a/tests/TESTING.md b/tests/TESTING.md new file mode 100644 index 0000000..078ce74 --- /dev/null +++ b/tests/TESTING.md @@ -0,0 +1,225 @@ +# Testing Guide for Contributors + +This guide covers how to write, run, and maintain tests for the debugger CLI. + +## Quick Start + +```bash +# Run a single test scenario +debugger test tests/scenarios/hello_world_c.yml + +# Run with verbose output +debugger test tests/scenarios/hello_world_c.yml --verbose + +# Run with a specific adapter +debugger test tests/scenarios/hello_world_c.yml --adapter gdb +``` + +## Test Architecture + +The test framework uses three components: + +1. **YAML Scenarios** (`tests/scenarios/*.yml`) - Define test workflows +2. **Test Fixtures** (`tests/fixtures/`) - Programs to debug +3. **Test Runner** (`src/testing/runner.rs`) - Executes scenarios via daemon + +``` +YAML Scenario + | + v +run_scenario() [runner.rs] + | + +-- Setup steps (shell commands: compilation) + | + +-- Start/Attach debug session + | + +-- Execute test steps (commands, assertions) + | + v +TestResult (pass/fail) +``` + +## Adding a New Test Scenario + +### Step 1: Choose or Create a Fixture + +Use existing fixtures when possible: +- `simple.c` / `simple.go` / `simple.js` / `simple.py` - Basic debugging +- `threaded.c` / `threaded.go` - Multi-threaded programs + +If you need a new fixture, add it to `tests/fixtures/` with BREAKPOINT_MARKERs (see below). + +### Step 2: Create the YAML Scenario + +Create `tests/scenarios/<feature>_<language>.yml`: + +```yaml +name: "Feature Test Name" +description: "What this test verifies" + +setup: + # Compile the fixture (if needed) + - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c" + +target: + program: "tests/fixtures/test_simple_c" + adapter: "lldb" # lldb, gdb, go, python, js-debug + stop_on_entry: true + +steps: + # Set a breakpoint + - action: command + command: "break simple.c:19" + expect: + success: true + + # Continue to breakpoint + - action: command + command: "continue" + + # Wait for stop event + - action: await + timeout: 10 + expect: + reason: "breakpoint" + file: "simple.c" + line: 19 + + # Inspect variables + - action: inspect_locals + asserts: + - name: "x" + value: "10" + + # Continue to exit + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "exited" +``` + +### Step 3: Test Locally + +```bash +debugger test tests/scenarios/your_new_test.yml --verbose +``` + +### Step 4: Add to CI + +Add the test to `.github/workflows/e2e-tests.yml` under the appropriate adapter job.
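For contributors extending the DSL itself, it helps to see how a scenario's `target:` block maps onto the `serde` structs in `src/testing/config.rs`. A minimal sketch — the field names mirror `TargetConfig`, but the struct here is simplified and a `serde_yaml` dev-dependency is assumed:

```rust
use serde::Deserialize;
use std::path::PathBuf;

fn default_mode() -> String {
    "launch".to_string()
}

// Simplified stand-in for src/testing/config.rs::TargetConfig.
#[derive(Deserialize, Debug)]
struct TargetConfig {
    program: PathBuf,
    args: Option<Vec<String>>,
    #[serde(default = "default_mode")]
    mode: String,
    adapter: Option<String>,
    #[serde(default)]
    stop_on_entry: bool,
}

fn main() -> Result<(), serde_yaml::Error> {
    // The `target:` block of a scenario deserializes directly into the struct;
    // omitted fields fall back to their serde defaults (e.g. mode = "launch").
    let yaml = r#"
program: "tests/fixtures/test_simple_c"
adapter: "lldb"
stop_on_entry: true
"#;
    let target: TargetConfig = serde_yaml::from_str(yaml)?;
    assert_eq!(target.mode, "launch"); // default applied
    println!("{:?}", target);
    Ok(())
}
```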
+ +## Step Types Reference + +| Step Type | Purpose | Key Fields | +|-----------|---------|------------| +| `command` | Execute debugger command | `command`, `expect.success` | +| `await` | Wait for stop event | `timeout`, `expect.reason/file/line` | +| `inspect_locals` | Check local variables | `asserts[].name/value/value_contains/type` | +| `inspect_stack` | Check call stack | `asserts[].index/function/file/line` | +| `check_output` | Check program output | `contains`, `equals` | +| `evaluate` | Evaluate expression | `expression`, `expect.result/result_contains` | + +## BREAKPOINT_MARKER Convention + +Fixtures use semantic markers for reliable breakpoint locations: + +```c +// BREAKPOINT_MARKER: main_start +int x = 10; + +// BREAKPOINT_MARKER: before_add +int sum = add(x, y); +``` + +Tests reference these markers by name or the line number after the marker. + +## Common Pitfalls + +### 1. Missing Timeout on Await Steps + +**Bad:** +```yaml +- action: await + expect: + reason: "breakpoint" +``` + +**Good:** +```yaml +- action: await + timeout: 10 # Always specify timeout! + expect: + reason: "breakpoint" +``` + +### 2. Breaking Before pthread_barrier_wait + +In `threaded.c`, do NOT set breakpoints before `pthread_barrier_wait()` - this causes deadlocks. Use the `worker_body` function marker instead. + +### 3. Forgetting Program Termination + +Every scenario MUST end with program termination: +```yaml +- action: await + timeout: 10 + expect: + reason: "exited" # or "terminated" +``` + +### 4. Hardcoded Line Numbers + +Prefer semantic locations over hardcoded line numbers: +```yaml +# Fragile - breaks if code changes +command: "break simple.c:42" + +# Better - use function names +command: "break main" +command: "break add" +``` + +## Adding Support for a New Language + +1. **Create fixture**: `tests/fixtures/simple.<ext>` + - Add BREAKPOINT_MARKERs at key locations + - Include basic functions (add, factorial, main) + +2. **Create hello_world scenario**: `tests/scenarios/hello_world_<language>.yml` + - Test basic debugging workflow + - Set breakpoint, continue, inspect locals, exit + +3. **Add compilation to CI**: `.github/workflows/e2e-tests.yml` + - Add fixture compilation step + - Add adapter-specific test job if needed + +4. **Update documentation**: + - `tests/fixtures/README.md` - Document new fixture + - `tests/scenarios/README.md` - Add to adapter mapping + +## Adapter Compatibility Matrix + +Not all features work with all adapters: + +| Feature | LLDB | GDB | Delve | debugpy | js-debug | +|---------|------|-----|-------|---------|----------| +| Conditional breakpoints | ✅ | ✅ | ✅ | ✅ | ✅ | +| Hit count breakpoints | ✅ | ✅ | ✅ | ❌ | ❌ | +| Thread listing | ✅ | ✅ | ✅ | ✅ | ✅ | +| Stack navigation | ✅ | ✅ | ✅ | ✅ | ✅ | +| Output capture | ✅ | ✅ | ✅ | ✅ | ✅ | +| Pause/Resume | ✅ | ✅ | ✅ | ✅ | ✅ | + +## Running Tests in CI + +Tests run automatically on push/PR via GitHub Actions: +- Matrix: 5 adapters × 2 platforms +- Tests have automatic retry (3 attempts) for flaky test handling +- Failed test logs uploaded as artifacts + +To debug CI failures: +1. Check the job logs for error messages +2. Download log artifacts from the failed run +3. 
Reproduce locally with `--verbose` flag diff --git a/tests/e2e/hello_world.js b/tests/e2e/hello_world.js new file mode 100644 index 0000000..b2770fc --- /dev/null +++ b/tests/e2e/hello_world.js @@ -0,0 +1,15 @@ +// JavaScript Hello World for E2E debugging tests + +function main() { + const x = 10; // BREAKPOINT_MARKER: after_x_init + const y = 20; // BREAKPOINT_MARKER: after_y_init + const sum = x + y; // BREAKPOINT_MARKER: after_sum + + console.log(`Hello from JavaScript! Sum is ${sum}`); + + return 0; +} + +// Call main and exit with its return code +const exitCode = main(); +process.exit(exitCode); diff --git a/tests/e2e/hello_world.ts b/tests/e2e/hello_world.ts new file mode 100644 index 0000000..58946c7 --- /dev/null +++ b/tests/e2e/hello_world.ts @@ -0,0 +1,15 @@ +// TypeScript Hello World for E2E debugging tests + +function main(): number { + const x: number = 10; // BREAKPOINT_MARKER: after_x_init + const y: number = 20; // BREAKPOINT_MARKER: after_y_init + const sum: number = x + y; // BREAKPOINT_MARKER: after_sum + + console.log(`Hello from TypeScript! Sum is ${sum}`); + + return 0; +} + +// Call main and exit with its return code +const exitCode: number = main(); +process.exit(exitCode); diff --git a/tests/e2e/run_tests.py b/tests/e2e/run_tests.py deleted file mode 100644 index a0feb1d..0000000 --- a/tests/e2e/run_tests.py +++ /dev/null @@ -1,358 +0,0 @@ -import subprocess -import os -import sys -import time -import shutil -import tempfile - -# Configuration -PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -DEBUGGER_BIN = os.path.join(PROJECT_ROOT, "target", "release", "debugger") -TEST_DIR = os.path.join(PROJECT_ROOT, "tests", "e2e") -GCC = "gcc" -RUSTC = "rustc" -GO = "go" -PYTHON = sys.executable - -def log(msg): - print(f"[TEST] {msg}") - -def compile_c(source, output): - cmd = [GCC, "-g", "-o", output, source] - log(f"Compiling C: {' '.join(cmd)}") - subprocess.check_call(cmd) - -def compile_rust(source, output): - cmd = [RUSTC, "-g", "-o", output, source] - log(f"Compiling Rust: {' '.join(cmd)}") - subprocess.check_call(cmd) - -def compile_go(source, output): - # Disable optimizations and inlining for better debugging - cmd = [GO, "build", "-gcflags=all=-N -l", "-o", output, source] - log(f"Compiling Go: {' '.join(cmd)}") - subprocess.check_call(cmd) - -def setup_config(): - """Create a temporary config directory and config file""" - config_dir = tempfile.mkdtemp(prefix="debugger-test-config-") - app_config_dir = os.path.join(config_dir, "debugger-cli") - os.makedirs(app_config_dir, exist_ok=True) - - config_path = os.path.join(app_config_dir, "config.toml") - - # Check where python is - python_path = sys.executable - - config_content = f""" -[adapters.debugpy] -path = "{python_path}" -args = ["-m", "debugpy.adapter"] - -[adapters.go] -path = "dlv" -args = ["dap"] -transport = "tcp" - -[defaults] -adapter = "lldb-dap" - -[timeouts] -dap_initialize_secs = 10 -dap_request_secs = 30 -await_default_secs = 60 -""" - - with open(config_path, "w") as f: - f.write(config_content) - - return config_dir - -def run_debugger_command(cmd, config_dir, input_cmds=None): - full_cmd = [DEBUGGER_BIN] + cmd - log(f"Running: {' '.join(full_cmd)}") - - env = os.environ.copy() - if config_dir: - env["XDG_CONFIG_HOME"] = config_dir - # Also use a temp runtime dir to avoid conflicts - env["XDG_RUNTIME_DIR"] = os.path.join(config_dir, "runtime") - os.makedirs(env["XDG_RUNTIME_DIR"], exist_ok=True) - - process = subprocess.Popen( - full_cmd, - 
stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - cwd=TEST_DIR, - env=env - ) - - stdout, stderr = process.communicate(input=input_cmds) - - if process.returncode != 0: - log(f"Command failed with code {process.returncode}") - log(f"STDOUT: {stdout}") - log(f"STDERR: {stderr}") - return False, stdout, stderr - - return True, stdout, stderr - -def test_program(name, source_file, compiler_func, expected_output_substr, config_dir, adapter_args=None): - log(f"=== Testing {name} ===") - binary_path = os.path.join(TEST_DIR, name) - source_path = os.path.join(TEST_DIR, source_file) - - # 1. Compile (if compiler provided) - if compiler_func: - try: - compiler_func(source_path, binary_path) - except Exception as e: - log(f"Compilation failed: {e}") - return False - else: - # interpreted language, binary_path is just source_path - binary_path = source_path - - # 2. Start Debugger - - # Stop any existing daemon to ensure clean state - run_debugger_command(["stop"], config_dir) - - # Start debugging with stop-on-entry - log("Starting debugger...") - start_cmd = ["start", binary_path, "--stop-on-entry"] - if adapter_args: - start_cmd.extend(adapter_args) - - ok, out, err = run_debugger_command(start_cmd, config_dir) - if not ok: - log("Failed to start debugger") - return False - - # Set breakpoint at main - log("Setting breakpoint...") - ok, out, err = run_debugger_command(["breakpoint", "add", "main"], config_dir) - if not ok: - log("Failed to set breakpoint") - return False - - # Continue (from entry point) - log("Continuing execution from entry...") - ok, out, err = run_debugger_command(["continue"], config_dir) - if not ok: - log(f"Failed to continue from entry: {err}") - return False - - # Wait for breakpoint hit - log("Waiting for breakpoint stop...") - ok, out, err = run_debugger_command(["await"], config_dir) - if not ok: return False - log(f"Await output:\n{out}") - - # Check threads - log("Checking threads...") - ok, out_threads, err = run_debugger_command(["threads"], config_dir) - log(f"Threads:\n{out_threads}") - - # Inspect variables - log("Inspecting locals...") - ok, out_locals, err = run_debugger_command(["locals"], config_dir) - if not ok: return False - log(f"Locals:\n{out_locals}") - - # Verify variable 'sum' or 'x' exists - if "x" not in out_locals and "y" not in out_locals: - log("WARNING: Locals x/y not found") - - # Continue to finish - log("Continuing to finish...") - ok, out, err = run_debugger_command(["continue"], config_dir) - if not ok: return False - - # Wait for exit - log("Waiting for program exit...") - ok, out, err = run_debugger_command(["await"], config_dir) - # Note: await might fail if session is already terminated, or return exited event. 
- - # Check output - log("Checking output...") - # Give a moment for output buffer to flush - time.sleep(1.0) - ok, out_prog, err = run_debugger_command(["output"], config_dir) - log(f"Program Output:\n{out_prog}") - - if expected_output_substr not in out_prog: - log(f"FAILED: Expected output '{expected_output_substr}' not found.") - return False - - log(f"SUCCESS: {name} passed.") - return True - -def test_complex_python(config_dir): - log("=== Testing Complex Python App ===") - - app_dir = os.path.join(PROJECT_ROOT, "tests", "complex_app") - main_py = os.path.join(app_dir, "main.py") - - if not os.path.exists(main_py): - log(f"Complex app not found at {main_py}") - return False - - # Start Debugger - run_debugger_command(["stop"], config_dir) - log("Starting debugger on complex app...") - - cmd = ["start", main_py, "--adapter", "debugpy", "--stop-on-entry"] - ok, out, err = run_debugger_command(cmd, config_dir) - if not ok: return False - - # We will run a script of commands to exercise various features - commands = [ - "break scenarios.py:7", # recursion_step - "break scenarios.py:34", # thread_work - "break scenarios.py:59", # catch_exception - "continue", # Hit stop-on-entry - "continue", # Should hit recursion_step - "bt --limit 5", # Check backtrace - "locals", # Check locals - "break remove --all", # Clear breakpoints - "continue", # Finish recursion, should hit catch_exception (exception test runs after recursion) - "continue", # Finish exception, run large data - "continue", # Finish large data, run threads (should hit thread_work) - "threads", # List threads - "continue", # Continue thread 1 - "continue", # Continue thread 2 (if it hits) or finish - "await" # Wait for exit - ] - - # We need to send these commands interactively or batch them. - # The current run_debugger_command helper sends input all at once if provided, - # but the debugger might not be ready for all of them. - # However, since we are using `subprocess.communicate`, it sends all input and close stdin. - # The debugger CLI reads from stdin. If it processes commands sequentially, this might work - # provided it doesn't exit early. - - # A better approach for this test helper might be to just run the sequence. - # But since we need to verify output at steps, implementing a full interactive drive is complex - # in this simple runner. - # Let's try to just run it and see if we get the expected "Complex App Finished" output - # and maybe some intermediate logs in stdout. - - # For now, let's just set breakpoints and continue until finish, checking final output. 
- - input_script = "\n".join([ - "break scenarios.py:8", # recursion_step (verify line numbers match file) - "continue", # from entry - "continue", # hit recursion - "bt 5", - "locals", - "break remove --all", - "continue", # finish all - "await", - "output" - ]) - - ok, out, err = run_debugger_command([], config_dir, input_cmds=input_script) - - # Check if we see expected things in the output - if "Complex App Finished" not in out: - log("FAILED: Did not see 'Complex App Finished'") - log(f"Output: {out}") - return False - - log("SUCCESS: Complex Python App passed basic run.") - return True - -def check_debugpy(): - try: - subprocess.check_call([PYTHON, "-c", "import debugpy"]) - return True - except subprocess.CalledProcessError: - return False - -def check_delve(): - """Check if Delve (dlv) is available""" - try: - subprocess.check_call(["dlv", "version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - return True - except (subprocess.CalledProcessError, FileNotFoundError): - return False - -def main(): - if not os.path.exists(DEBUGGER_BIN): - log(f"Debugger binary not found at {DEBUGGER_BIN}. Please build release first.") - sys.exit(1) - - config_dir = setup_config() - log(f"Created temp config dir at {config_dir}") - - failed = False - - try: - # Test C - if not test_program("test_c", "hello_world.c", compile_c, "Hello from C! Sum is 30", config_dir): - failed = True - - # Test Rust - if not test_program("test_rs", "hello_world.rs", compile_rust, "Hello from Rust! Sum is 30", config_dir): - failed = True - - # Test Python - if check_debugpy(): - # For Python, we use the source file directly and specify debugpy adapter - # Note: hello_world.py needs to exist. We can reuse simple.py or copy it. - # Using fixtures/simple.py for now as hello_world.py might not be there. - - # First, check if hello_world.py exists, if not create it - py_test_file = os.path.join(TEST_DIR, "hello_world.py") - if not os.path.exists(py_test_file): - with open(py_test_file, "w") as f: - f.write(""" -import sys - -def main(): - x = 10 - y = 20 - print(f"Hello from Python! Sum is {x+y}") - return 0 - -if __name__ == "__main__": - sys.exit(main()) -""") - - if not test_program("test_py", "hello_world.py", None, "Hello from Python! Sum is 30", config_dir, ["--adapter", "debugpy"]): - failed = True - else: - log("Skipping Python test (debugpy not found)") - - # Test Go - if check_delve(): - # First, check if hello_world.go exists - go_test_file = os.path.join(TEST_DIR, "hello_world.go") - if os.path.exists(go_test_file): - if not test_program("test_go", "hello_world.go", compile_go, "Hello from Go! Sum is 30", config_dir, ["--adapter", "go"]): - failed = True - else: - log("Skipping Go test (hello_world.go not found)") - else: - log("Skipping Go test (dlv not found)") - - # Test Complex Python - if check_debugpy(): - if not test_complex_python(config_dir): - failed = True - - finally: - # Cleanup - shutil.rmtree(config_dir) - - if failed: - sys.exit(1) - else: - log("All tests passed!") - sys.exit(0) - -if __name__ == "__main__": - main() diff --git a/tests/fixtures/README.md b/tests/fixtures/README.md new file mode 100644 index 0000000..ad1fe72 --- /dev/null +++ b/tests/fixtures/README.md @@ -0,0 +1,155 @@ +# Test Fixtures + +Test fixtures are minimal programs designed for debugging integration tests. Each fixture contains BREAKPOINT_MARKERs - semantic locations where tests can reliably set breakpoints. 
+ +## Fixture Files + +### simple.c / simple.go / simple.js / simple.py + +Single-threaded programs with basic computation. Used for testing breakpoints, stepping, variable inspection, and output capture. + +**Functions:** +- `add(a, b)` - Simple addition +- `factorial(n)` - Recursive factorial (n=5) +- `main` - Calls both functions, prints output + +**BREAKPOINT_MARKERs:** +- `main_start` - Entry point of main function +- `before_add` - Immediately before add() call +- `add_body` - Inside add() function +- `before_factorial` - Immediately before factorial() call +- `factorial_body` - Inside factorial() function (recursive) +- `before_exit` - Final marker before program exits + +**Output:** +``` +Sum: 30 +Factorial: 120 +``` + +### threaded.c / threaded.go + +Multithreaded programs with synchronization. Used for testing thread listing and thread-safe debugging. + +**C (threaded.c):** +- 2 worker threads using pthreads +- Portable barrier (mutex + condvar) synchronizes main + workers (3 threads total) +- Works on both Linux and macOS (macOS lacks pthread_barrier_t) +- Shared counter protected by mutex +- `worker_body(thread_id)` - Helper function called AFTER barrier (safe breakpoint target) + +**Go (threaded.go):** +- 2 worker goroutines +- Buffered channel provides deterministic start ordering +- Shared counter protected by sync.Mutex + +**BREAKPOINT_MARKERs:** +- `main_start` - Entry point of main function +- `main_wait` - Main thread waiting at barrier/channel +- `thread_entry` - Worker thread entry (C: BEFORE barrier, Go: before channel receive) +- `after_barrier` - C only: SAFE breakpoint after barrier synchronization +- `worker_body` - C only: Helper function after barrier (recommended breakpoint) +- `worker_start` - Worker begins critical section +- `worker_end` - Worker exits + +**C Threading Deadlock Warning:** + +Breaking at `thread_entry` or `thread_func` (before barrier) causes deadlock. The debugger stops one thread while the barrier waits for all 3 threads (main + 2 workers) to synchronize. Use `worker_body` function (recommended) or `after_barrier` line marker instead. + +**Why pthread_barrier instead of mutex/condvar?** + +Mutex/condvar synchronization allows non-deterministic thread interleavings - the debugger might stop threads in different orders on different runs, causing flaky tests. Barriers guarantee ALL threads reach the sync point before ANY proceed, making test behavior deterministic. The portable barrier implementation (mutex + condvar + counter) provides barrier semantics on macOS which lacks native `pthread_barrier_t`. + +**Output:** +``` +Starting 2 worker threads +Thread 0 incremented counter to 1 +Thread 1 incremented counter to 2 +Final counter value: 2 +``` + +(Note: Thread output order is non-deterministic) + +## BREAKPOINT_MARKER Convention + +BREAKPOINT_MARKERs are comments marking semantic locations: + +```c +// BREAKPOINT_MARKER: main_start +int x = 10; +``` + +```go +// BREAKPOINT_MARKER: add_body +result := a + b +``` + +```javascript +// BREAKPOINT_MARKER: before_factorial +const fact = factorial(5); +``` + +Tests reference these markers by function name or line number. Markers ensure breakpoints hit meaningful locations even if code changes slightly. 
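Tests discover markers by scanning fixture sources at run time rather than hardcoding line numbers. A minimal sketch of such a scanner — the integration suite's `find_breakpoint_markers` helper works along these lines, though its exact signature is assumed here:

```rust
use std::collections::HashMap;
use std::fs;
use std::path::Path;

/// Maps each marker name to the 1-based line number *after* the marker
/// comment, assuming the marker sits on its own line above the target
/// statement (as in tests/fixtures/simple.*).
fn find_breakpoint_markers(fixture: &Path) -> HashMap<String, usize> {
    let source = fs::read_to_string(fixture).expect("fixture should be readable");
    let mut markers = HashMap::new();
    for (idx, line) in source.lines().enumerate() {
        if let Some(pos) = line.find("BREAKPOINT_MARKER:") {
            let name = line[pos + "BREAKPOINT_MARKER:".len()..].trim().to_string();
            // idx is 0-based, so the marker itself is line idx + 1 and the
            // statement below it is line idx + 2.
            markers.insert(name, idx + 2);
        }
    }
    markers
}
```

Markers written as trailing comments on the same line as code (e.g. in `tests/e2e/hello_world.js`) would need `idx + 1` instead.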
+ +## Compilation + +Fixtures compile with debug symbols: + +```bash +# C +gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c +gcc -g -pthread tests/fixtures/threaded.c -o tests/fixtures/test_threaded_c + +# Go +go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go +go build -gcflags='all=-N -l' -o tests/fixtures/test_threaded_go tests/fixtures/threaded.go + +# JavaScript/TypeScript (no compilation needed) +node tests/fixtures/simple.js + +# Python (no compilation needed) +python3 tests/fixtures/simple.py +``` + +Compilation commands are included in scenario `setup:` steps. + +### attach_target.c + +Long-running program for attach mode tests. Loops for 30 seconds with 1-second sleeps, allowing time for attach operations. + +**Purpose:** Test attaching debugger to running process (vs launching new process) + +**BREAKPOINT_MARKERs:** +- `loop_body` - Inside the main loop (safe breakpoint target) + +**Output:** +``` +PID: <pid> +``` + +### multi_source/ (Directory) + +Multi-file C project for testing cross-file debugging. + +**Files:** +- `main.c` - Entry point, calls helper functions +- `utils.c` - Helper function implementations +- `utils.h` - Function declarations + +**BREAKPOINT_MARKERs:** +- `main_start` - Entry point in main.c +- `before_helper_call` - Before calling utils.c functions +- `helper_add_body` - Inside helper_add() in utils.c +- `helper_multiply_body` - Inside helper_multiply() in utils.c +- `before_exit` - Final marker in main.c + +**Compilation:** +```bash +gcc -g tests/fixtures/multi_source/main.c tests/fixtures/multi_source/utils.c -o tests/fixtures/multi_source/test_multi +``` + +**Output:** +``` +Sum: 15 +Product: 50 +``` diff --git a/tests/fixtures/attach_target.c b/tests/fixtures/attach_target.c new file mode 100644 index 0000000..e1dfec5 --- /dev/null +++ b/tests/fixtures/attach_target.c @@ -0,0 +1,21 @@ +// Long-running target for attach mode tests +// Runs for 30 seconds with 1-second sleep intervals, allowing time for debugger attach +#include <stdio.h> +#include <unistd.h> + +int main() { + // Print PID for test harness to capture + printf("PID: %d\n", getpid()); + fflush(stdout); + + // Run for 30 seconds - provides margin for attach operation + // 30s chosen: attach completes <2s locally, 15x safety margin for slow CI + for (int i = 0; i < 30; i++) { + // BREAKPOINT_MARKER: loop_body + int counter = i; + (void)counter; // Prevent optimization + sleep(1); + } + + return 0; +} diff --git a/tests/fixtures/dist/simple.js b/tests/fixtures/dist/simple.js new file mode 100644 index 0000000..e4aeeee --- /dev/null +++ b/tests/fixtures/dist/simple.js @@ -0,0 +1,40 @@ +#!/usr/bin/env node +"use strict"; +function add(a, b) { + // BREAKPOINT_MARKER: add_body + const result = a + b; + return result; +} +function factorial(n) { + // BREAKPOINT_MARKER: factorial_body + if (n <= 1) { + return 1; + } + return n * factorial(n - 1); +} +function multiply(a, b) { + // BREAKPOINT_MARKER: multiply_body + return a * b; +} +function main() { + // BREAKPOINT_MARKER: main_start + const x = 10; + const y = 20; + const message = "hello"; + const obj = { name: "test", value: 42 }; + const arr = [1, 2, 3, 4, 5]; + // BREAKPOINT_MARKER: before_add + const sumResult = add(x, y); + console.log(`Sum: ${sumResult}`); + // BREAKPOINT_MARKER: before_multiply + const product = multiply(x, y); + console.log(`Product: ${product}`); + // BREAKPOINT_MARKER: before_factorial + const fact = factorial(5); + console.log(`Factorial: ${fact}`); + // BREAKPOINT_MARKER: before_exit + return 
0; +} +// BREAKPOINT_MARKER: entry_point +process.exit(main()); +//# sourceMappingURL=simple.js.map \ No newline at end of file diff --git a/tests/fixtures/dist/simple.js.map b/tests/fixtures/dist/simple.js.map new file mode 100644 index 0000000..4352b90 --- /dev/null +++ b/tests/fixtures/dist/simple.js.map @@ -0,0 +1 @@ +{"version":3,"file":"simple.js","sourceRoot":"","sources":["../simple.ts"],"names":[],"mappings":";;AAEA,SAAS,GAAG,CAAC,CAAS,EAAE,CAAS;IAC7B,8BAA8B;IAC9B,MAAM,MAAM,GAAW,CAAC,GAAG,CAAC,CAAC;IAC7B,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,SAAS,SAAS,CAAC,CAAS;IACxB,oCAAoC;IACpC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACT,OAAO,CAAC,CAAC;IACb,CAAC;IACD,OAAO,CAAC,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AAChC,CAAC;AAED,SAAS,QAAQ,CAAC,CAAS,EAAE,CAAS;IAClC,mCAAmC;IACnC,OAAO,CAAC,GAAG,CAAC,CAAC;AACjB,CAAC;AAED,SAAS,IAAI;IACT,gCAAgC;IAChC,MAAM,CAAC,GAAW,EAAE,CAAC;IACrB,MAAM,CAAC,GAAW,EAAE,CAAC;IACrB,MAAM,OAAO,GAAW,OAAO,CAAC;IAChC,MAAM,GAAG,GAAoC,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;IACzE,MAAM,GAAG,GAAa,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;IAEtC,gCAAgC;IAChC,MAAM,SAAS,GAAW,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACpC,OAAO,CAAC,GAAG,CAAC,QAAQ,SAAS,EAAE,CAAC,CAAC;IAEjC,qCAAqC;IACrC,MAAM,OAAO,GAAW,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACvC,OAAO,CAAC,GAAG,CAAC,YAAY,OAAO,EAAE,CAAC,CAAC;IAEnC,sCAAsC;IACtC,MAAM,IAAI,GAAW,SAAS,CAAC,CAAC,CAAC,CAAC;IAClC,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,EAAE,CAAC,CAAC;IAElC,iCAAiC;IACjC,OAAO,CAAC,CAAC;AACb,CAAC;AAED,iCAAiC;AACjC,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC"} \ No newline at end of file diff --git a/tests/fixtures/multi_source/main.c b/tests/fixtures/multi_source/main.c new file mode 100644 index 0000000..a93fc1c --- /dev/null +++ b/tests/fixtures/multi_source/main.c @@ -0,0 +1,19 @@ +// Multi-source project main program +#include +#include "utils.h" + +int main() { + // BREAKPOINT_MARKER: main_start + int x = 5; + int y = 10; + + // BREAKPOINT_MARKER: before_helper_call + int sum = helper_add(x, y); + printf("Sum: %d\n", sum); + + int product = helper_multiply(x, y); + printf("Product: %d\n", product); + + // BREAKPOINT_MARKER: before_exit + return 0; +} diff --git a/tests/fixtures/multi_source/utils.c b/tests/fixtures/multi_source/utils.c new file mode 100644 index 0000000..3d4c440 --- /dev/null +++ b/tests/fixtures/multi_source/utils.c @@ -0,0 +1,14 @@ +// Multi-source project utilities implementation +#include "utils.h" + +int helper_add(int a, int b) { + // BREAKPOINT_MARKER: helper_add_body + int result = a + b; + return result; +} + +int helper_multiply(int a, int b) { + // BREAKPOINT_MARKER: helper_multiply_body + int result = a * b; + return result; +} diff --git a/tests/fixtures/multi_source/utils.h b/tests/fixtures/multi_source/utils.h new file mode 100644 index 0000000..89adeba --- /dev/null +++ b/tests/fixtures/multi_source/utils.h @@ -0,0 +1,8 @@ +// Multi-source project utilities header +#ifndef UTILS_H +#define UTILS_H + +int helper_add(int a, int b); +int helper_multiply(int a, int b); + +#endif // UTILS_H diff --git a/tests/fixtures/package-lock.json b/tests/fixtures/package-lock.json new file mode 100644 index 0000000..3064681 --- /dev/null +++ b/tests/fixtures/package-lock.json @@ -0,0 +1,44 @@ +{ + "name": "fixtures", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "devDependencies": { + "@types/node": "^25.0.10", + "typescript": "^5.9.3" + } + }, + "node_modules/@types/node": { + "version": "25.0.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.10.tgz", + "integrity": 
"sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/tests/fixtures/package.json b/tests/fixtures/package.json new file mode 100644 index 0000000..14f9da3 --- /dev/null +++ b/tests/fixtures/package.json @@ -0,0 +1,6 @@ +{ + "devDependencies": { + "@types/node": "^25.0.10", + "typescript": "^5.9.3" + } +} diff --git a/tests/fixtures/simple.js b/tests/fixtures/simple.js new file mode 100644 index 0000000..1248166 --- /dev/null +++ b/tests/fixtures/simple.js @@ -0,0 +1,47 @@ +#!/usr/bin/env node + +function add(a, b) { + // BREAKPOINT_MARKER: add_body + const result = a + b; + return result; +} + +function factorial(n) { + // BREAKPOINT_MARKER: factorial_body + if (n <= 1) { + return 1; + } + return n * factorial(n - 1); +} + +function multiply(a, b) { + // BREAKPOINT_MARKER: multiply_body + return a * b; +} + +function main() { + // BREAKPOINT_MARKER: main_start + const x = 10; + const y = 20; + const message = "hello"; + const obj = { name: "test", value: 42 }; + const arr = [1, 2, 3, 4, 5]; + + // BREAKPOINT_MARKER: before_add + const sumResult = add(x, y); + console.log(`Sum: ${sumResult}`); + + // BREAKPOINT_MARKER: before_multiply + const product = multiply(x, y); + console.log(`Product: ${product}`); + + // BREAKPOINT_MARKER: before_factorial + const fact = factorial(5); + console.log(`Factorial: ${fact}`); + + // BREAKPOINT_MARKER: before_exit + return 0; +} + +// BREAKPOINT_MARKER: entry_point +process.exit(main()); diff --git a/tests/fixtures/simple.ts b/tests/fixtures/simple.ts new file mode 100644 index 0000000..30f3df1 --- /dev/null +++ b/tests/fixtures/simple.ts @@ -0,0 +1,47 @@ +#!/usr/bin/env node + +function add(a: number, b: number): number { + // BREAKPOINT_MARKER: add_body + const result: number = a + b; + return result; +} + +function factorial(n: number): number { + // BREAKPOINT_MARKER: factorial_body + if (n <= 1) { + return 1; + } + return n * factorial(n - 1); +} + +function multiply(a: number, b: number): number { + // BREAKPOINT_MARKER: multiply_body + return a * b; +} + +function main(): number { + // BREAKPOINT_MARKER: main_start + const x: number = 10; + const y: number = 20; + const message: string = "hello"; + const obj: { name: string; value: number } = { name: "test", value: 42 }; + const arr: number[] = [1, 2, 3, 4, 5]; + + // BREAKPOINT_MARKER: before_add + const sumResult: number = add(x, y); + console.log(`Sum: ${sumResult}`); + + // BREAKPOINT_MARKER: before_multiply + const product: number = multiply(x, y); + console.log(`Product: ${product}`); + + // BREAKPOINT_MARKER: before_factorial + const fact: number = factorial(5); + console.log(`Factorial: ${fact}`); + + // BREAKPOINT_MARKER: before_exit + return 0; +} + +// 
BREAKPOINT_MARKER: entry_point +process.exit(main()); diff --git a/tests/fixtures/threaded.c b/tests/fixtures/threaded.c new file mode 100644 index 0000000..07970b5 --- /dev/null +++ b/tests/fixtures/threaded.c @@ -0,0 +1,116 @@ +// Multithreaded test program for debugger integration tests +#include <stdio.h> +#include <stdlib.h> +#include <pthread.h> + +#define NUM_THREADS 2 + +// Portable barrier implementation (macOS lacks pthread_barrier_t) +typedef struct { + pthread_mutex_t mutex; + pthread_cond_t cond; + int count; + int waiting; + int phase; +} portable_barrier_t; + +int portable_barrier_init(portable_barrier_t *barrier, int count) { + barrier->count = count; + barrier->waiting = 0; + barrier->phase = 0; + if (pthread_mutex_init(&barrier->mutex, NULL) != 0) return -1; + if (pthread_cond_init(&barrier->cond, NULL) != 0) { + pthread_mutex_destroy(&barrier->mutex); + return -1; + } + return 0; +} + +int portable_barrier_wait(portable_barrier_t *barrier) { + pthread_mutex_lock(&barrier->mutex); + int my_phase = barrier->phase; + barrier->waiting++; + if (barrier->waiting == barrier->count) { + barrier->waiting = 0; + barrier->phase++; + pthread_cond_broadcast(&barrier->cond); + } else { + while (my_phase == barrier->phase) { + pthread_cond_wait(&barrier->cond, &barrier->mutex); + } + } + pthread_mutex_unlock(&barrier->mutex); + return 0; +} + +int portable_barrier_destroy(portable_barrier_t *barrier) { + pthread_mutex_destroy(&barrier->mutex); + pthread_cond_destroy(&barrier->cond); + return 0; +} + +// Shared state +portable_barrier_t barrier; +pthread_mutex_t counter_mutex = PTHREAD_MUTEX_INITIALIZER; +int shared_counter = 0; + +// Helper function called AFTER barrier - safe to break here +// BREAKPOINT_MARKER: worker_body +void worker_body(int thread_id) { + // BREAKPOINT_MARKER: worker_start + pthread_mutex_lock(&counter_mutex); + shared_counter++; + int local_count = shared_counter; + pthread_mutex_unlock(&counter_mutex); + + printf("Thread %d incremented counter to %d\n", thread_id, local_count); + // BREAKPOINT_MARKER: worker_end +} + +void* thread_func(void* arg) { + int thread_id = *(int*)arg; + + // BREAKPOINT_MARKER: thread_entry (BEFORE barrier - do NOT break here) + // Breaking here causes deadlock: debugger stops this thread while other threads + // wait for all NUM_THREADS+1 threads (including stopped one) to reach barrier + portable_barrier_wait(&barrier); + + // BREAKPOINT_MARKER: after_barrier (SAFE to break here - all threads synchronized) + worker_body(thread_id); + return NULL; +} + +int main(int argc, char *argv[]) { + pthread_t threads[NUM_THREADS]; + int thread_ids[NUM_THREADS]; + + // Initialize barrier for main thread + worker threads + if (portable_barrier_init(&barrier, NUM_THREADS + 1) != 0) { + fprintf(stderr, "Failed to initialize barrier\n"); + return 1; + } + + // BREAKPOINT_MARKER: main_start + printf("Starting %d worker threads\n", NUM_THREADS); + + // Create worker threads + for (int i = 0; i < NUM_THREADS; i++) { + thread_ids[i] = i; + if (pthread_create(&threads[i], NULL, thread_func, &thread_ids[i]) != 0) { + fprintf(stderr, "Failed to create thread %d\n", i); + return 1; + } + } + + // BREAKPOINT_MARKER: main_wait + portable_barrier_wait(&barrier); + + // Join all threads + for (int i = 0; i < NUM_THREADS; i++) { + pthread_join(threads[i], NULL); + } + + printf("Final counter value: %d\n", shared_counter); + portable_barrier_destroy(&barrier); + return 0; +} diff --git a/tests/fixtures/threaded.go b/tests/fixtures/threaded.go new file mode 100644 index 0000000..d34ec06 --- 
/dev/null +++ b/tests/fixtures/threaded.go @@ -0,0 +1,56 @@ +// Multithreaded test program for debugger integration tests +package main + +import ( + "fmt" + "sync" +) + +const numWorkers = 2 + +var sharedCounter int +var counterMutex sync.Mutex + +func worker(id int, start chan bool, done *sync.WaitGroup) { + defer done.Done() + + // BREAKPOINT_MARKER: thread_entry + <-start + + // BREAKPOINT_MARKER: worker_start + counterMutex.Lock() + sharedCounter++ + localCount := sharedCounter + counterMutex.Unlock() + + fmt.Printf("Worker %d incremented counter to %d\n", id, localCount) + + // BREAKPOINT_MARKER: worker_end +} + +func main() { + // BREAKPOINT_MARKER: main_start + fmt.Printf("Starting %d workers\n", numWorkers) + + // Go lacks pthread_barrier equivalent in stdlib; buffered channel provides + // deterministic start ordering without requiring all goroutines to synchronize. + // Workers proceed independently after receiving start signal (differs from C + // barrier which requires all threads to reach barrier before any proceed). + startChan := make(chan bool, numWorkers) + var wg sync.WaitGroup + + // Spawn workers + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go worker(i, startChan, &wg) + } + + // BREAKPOINT_MARKER: main_wait + // Signal all workers to start (deterministic execution) + for i := 0; i < numWorkers; i++ { + startChan <- true + } + + wg.Wait() + fmt.Printf("Final counter value: %d\n", sharedCounter) +} diff --git a/tests/fixtures/tsconfig.json b/tests/fixtures/tsconfig.json new file mode 100644 index 0000000..f51a6a3 --- /dev/null +++ b/tests/fixtures/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "outDir": "./dist", + "rootDir": ".", + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true + }, + "include": ["*.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/tests/integration.rs b/tests/integration.rs index 76c8663..fa18908 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -144,6 +144,52 @@ impl TestContext { self.create_config_with_args(adapter_name, adapter_path, &[]); } + /// Create a config file for a TCP adapter + fn create_config_with_tcp( + &self, + adapter_name: &str, + adapter_path: &str, + args: &[&str], + spawn_style: &str, + ) { + let args_str = args.iter() + .map(|a| format!("\"{}\"", a)) + .collect::<Vec<_>>() + .join(", "); + let config_content = format!( + r#" +[adapters.{adapter_name}] +path = "{adapter_path}" +args = [{args_str}] +transport = "tcp" +spawn_style = "{spawn_style}" + +[defaults] +adapter = "{adapter_name}" + +[timeouts] +dap_initialize_secs = 10 +dap_request_secs = 30 +await_default_secs = 60 + +[daemon] +idle_timeout_minutes = 5 + +[output] +max_events = 1000 +max_bytes_mb = 1 +"#, + adapter_name = adapter_name, + adapter_path = adapter_path, + args_str = args_str, + spawn_style = spawn_style, + ); + + let config_path = self.config_dir.join("debugger-cli").join("config.toml"); + fs::create_dir_all(config_path.parent().unwrap()).expect("Failed to create config dir"); + fs::write(&config_path, config_content).expect("Failed to write config"); + } + /// Create a config file for the test with custom args fn create_config_with_args(&self, adapter_name: &str, adapter_path: &str, args: &[&str]) { let args_str = args.iter() @@ -335,6 +381,20 @@ fn cuda_gdb_available() -> Option<PathBuf> { which::which("cuda-gdb").ok() } +/// Checks if js-debug is available for testing +fn js_debug_available() -> Option<PathBuf>
{ + let adapter_dir = debugger::setup::installer::adapters_dir().join("js-debug"); + let dap_path = adapter_dir.join("node_modules/js-debug/src/dapDebugServer.js"); + if dap_path.exists() { + return Some(dap_path); + } + let dap_path = adapter_dir.join("node_modules/js-debug/dist/src/dapDebugServer.js"); + if dap_path.exists() { + return Some(dap_path); + } + None +} + // ============== Tests ============== #[test] @@ -834,3 +894,284 @@ fn test_config_loading() { "Expected failure for nonexistent adapter" ); } + +#[test] +#[ignore = "requires js-debug"] +fn test_basic_debugging_workflow_js() { + let js_debug_path = match js_debug_available() { + Some(path) => path, + None => { + eprintln!("Skipping test: js-debug not available"); + eprintln!("Install with: debugger install js-debug"); + return; + } + }; + + let node_path = match which::which("node") { + Ok(path) => path, + Err(_) => { + eprintln!("Skipping test: Node.js not available"); + return; + } + }; + + let ctx = TestContext::new("basic_workflow_js"); + ctx.create_config_with_tcp( + "js-debug", + node_path.to_str().unwrap(), + &[js_debug_path.to_str().unwrap()], + "tcp-port-arg", + ); + + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + let js_fixture = PathBuf::from(manifest_dir) + .join("tests") + .join("fixtures") + .join("simple.js"); + + let markers = ctx.find_breakpoint_markers(&js_fixture); + let main_start_line = markers.get("main_start").expect("Missing main_start marker"); + + ctx.cleanup_daemon(); + + let output = ctx.run_debugger_ok(&[ + "start", + js_fixture.to_str().unwrap(), + "--stop-on-entry", + ]); + assert!(output.contains("Started debugging") || output.contains("Stopped")); + + let bp_location = format!("simple.js:{}", main_start_line); + let output = ctx.run_debugger_ok(&["break", &bp_location]); + assert!(output.contains("Breakpoint") || output.contains("breakpoint")); + + let output = ctx.run_debugger_ok(&["continue"]); + assert!(output.contains("Continuing") || output.contains("running")); + + let output = ctx.run_debugger_ok(&["await", "--timeout", "30"]); + assert!( + output.contains("Stopped") || output.contains("breakpoint"), + "Expected stop at breakpoint: {}", + output + ); + + let output = ctx.run_debugger_ok(&["backtrace"]); + assert!(output.contains("main") || output.contains("#0")); + + let output = ctx.run_debugger_ok(&["locals"]); + assert!( + output.contains("x") || output.contains("Local"), + "Expected locals output: {}", + output + ); + + let _ = ctx.run_debugger(&["continue"]); + let output = ctx.run_debugger(&["await", "--timeout", "10"]); + assert!( + output.stdout.contains("exited") || output.stdout.contains("terminated") || + output.stderr.contains("exited") || output.stderr.contains("terminated") || + output.stdout.contains("stopped"), + "Expected program to finish: {:?}", + output + ); + + let _ = ctx.run_debugger(&["stop"]); +} + +#[test] +#[ignore = "requires js-debug"] +fn test_basic_debugging_workflow_ts() { + let js_debug_path = match js_debug_available() { + Some(path) => path, + None => { + eprintln!("Skipping test: js-debug not available"); + eprintln!("Install with: debugger install js-debug"); + return; + } + }; + + let node_path = match which::which("node") { + Ok(path) => path, + Err(_) => { + eprintln!("Skipping test: Node.js not available"); + return; + } + }; + + let ctx = TestContext::new("basic_workflow_ts"); + ctx.create_config_with_tcp( + "js-debug", + node_path.to_str().unwrap(), + &[js_debug_path.to_str().unwrap()], + "tcp-port-arg", + ); + + let manifest_dir = 
env!("CARGO_MANIFEST_DIR"); + let ts_fixture = PathBuf::from(manifest_dir) + .join("tests") + .join("fixtures") + .join("dist") + .join("simple.js"); + + if !ts_fixture.exists() { + eprintln!("Skipping test: TypeScript fixture not compiled"); + eprintln!("Run: cd tests/fixtures && npx tsc simple.ts --outDir dist --sourceMap"); + return; + } + + let markers = ctx.find_breakpoint_markers(&PathBuf::from(manifest_dir) + .join("tests") + .join("fixtures") + .join("simple.ts")); + let main_start_line = markers.get("main_start").expect("Missing main_start marker"); + + ctx.cleanup_daemon(); + + let output = ctx.run_debugger_ok(&[ + "start", + ts_fixture.to_str().unwrap(), + "--stop-on-entry", + ]); + assert!(output.contains("Started debugging") || output.contains("Stopped")); + + let bp_location = format!("simple.ts:{}", main_start_line); + let output = ctx.run_debugger_ok(&["break", &bp_location]); + assert!(output.contains("Breakpoint") || output.contains("breakpoint")); + + let output = ctx.run_debugger_ok(&["continue"]); + assert!(output.contains("Continuing") || output.contains("running")); + + let output = ctx.run_debugger_ok(&["await", "--timeout", "30"]); + assert!( + output.contains("Stopped") || output.contains("breakpoint"), + "Expected stop at breakpoint: {}", + output + ); + + let output = ctx.run_debugger_ok(&["backtrace"]); + assert!(output.contains("main") || output.contains("#0")); + + let _ = ctx.run_debugger(&["stop"]); +} + +#[test] +#[ignore = "requires js-debug"] +fn test_stepping_js() { + let js_debug_path = match js_debug_available() { + Some(path) => path, + None => { + eprintln!("Skipping test: js-debug not available"); + return; + } + }; + + let node_path = match which::which("node") { + Ok(path) => path, + Err(_) => { + eprintln!("Skipping test: Node.js not available"); + return; + } + }; + + let ctx = TestContext::new("stepping_js"); + ctx.create_config_with_tcp( + "js-debug", + node_path.to_str().unwrap(), + &[js_debug_path.to_str().unwrap()], + "tcp-port-arg", + ); + + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + let js_fixture = PathBuf::from(manifest_dir) + .join("tests") + .join("fixtures") + .join("simple.js"); + + let markers = ctx.find_breakpoint_markers(&js_fixture); + let before_add_line = markers.get("before_add").expect("Missing before_add marker"); + + ctx.cleanup_daemon(); + + ctx.run_debugger_ok(&["start", js_fixture.to_str().unwrap(), "--stop-on-entry"]); + + let bp_location = format!("simple.js:{}", before_add_line); + ctx.run_debugger_ok(&["break", &bp_location]); + ctx.run_debugger_ok(&["continue"]); + let output = ctx.run_debugger_ok(&["await", "--timeout", "30"]); + assert!(output.contains("Stopped") || output.contains("breakpoint")); + + ctx.run_debugger_ok(&["step"]); + let _output = ctx.run_debugger_ok(&["await", "--timeout", "10"]); + + let output = ctx.run_debugger_ok(&["backtrace"]); + assert!( + output.contains("add") || output.contains("simple.js"), + "Expected to be in add(): {}", + output + ); + + ctx.run_debugger_ok(&["finish"]); + let _ = ctx.run_debugger(&["await", "--timeout", "10"]); + + let output = ctx.run_debugger_ok(&["backtrace"]); + assert!(output.contains("main"), "Expected to be in main(): {}", output); + + ctx.run_debugger(&["stop"]); +} + +#[test] +#[ignore = "requires js-debug"] +fn test_expression_evaluation_js() { + let js_debug_path = match js_debug_available() { + Some(path) => path, + None => { + eprintln!("Skipping test: js-debug not available"); + return; + } + }; + + let node_path = match which::which("node") 
{ + Ok(path) => path, + Err(_) => { + eprintln!("Skipping test: Node.js not available"); + return; + } + }; + + let ctx = TestContext::new("eval_js"); + ctx.create_config_with_tcp( + "js-debug", + node_path.to_str().unwrap(), + &[js_debug_path.to_str().unwrap()], + "tcp-port-arg", + ); + + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + let js_fixture = PathBuf::from(manifest_dir) + .join("tests") + .join("fixtures") + .join("simple.js"); + + let markers = ctx.find_breakpoint_markers(&js_fixture); + let before_add_line = markers.get("before_add").expect("Missing before_add marker"); + + ctx.cleanup_daemon(); + + ctx.run_debugger_ok(&["start", js_fixture.to_str().unwrap(), "--stop-on-entry"]); + + let bp_location = format!("simple.js:{}", before_add_line); + ctx.run_debugger_ok(&["break", &bp_location]); + ctx.run_debugger_ok(&["continue"]); + ctx.run_debugger_ok(&["await", "--timeout", "30"]); + + let output = ctx.run_debugger_ok(&["print", "x"]); + assert!(output.contains("10") || output.contains("x ="), "Expected x=10: {}", output); + + let output = ctx.run_debugger_ok(&["print", "y"]); + assert!(output.contains("20") || output.contains("y ="), "Expected y=20: {}", output); + + let output = ctx.run_debugger_ok(&["print", "x + y"]); + assert!(output.contains("30"), "Expected x+y=30: {}", output); + + ctx.run_debugger(&["stop"]); +} diff --git a/tests/scenarios/README.md b/tests/scenarios/README.md new file mode 100644 index 0000000..55bf12c --- /dev/null +++ b/tests/scenarios/README.md @@ -0,0 +1,114 @@ +# Test Scenarios + +Test scenarios define end-to-end debugging workflows using a YAML DSL. Each scenario compiles a fixture program, starts a debug session, executes commands, and verifies behavior. + +## Naming Convention + +Scenarios follow the pattern `<feature>_<language>.yml`: + +- `hello_world_c.yml` - Basic C program debugging +- `conditional_breakpoint_go.yml` - Conditional breakpoints in Go +- `thread_list_c.yml` - Thread listing with C pthreads +- `stack_navigation_js.yml` - Frame navigation in JavaScript + +## YAML DSL Format + +### Structure + +```yaml +name: "Human-readable test name" +description: "What this test verifies" + +setup: + - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c" + +target: + program: "tests/fixtures/test_simple_c" + args: [] + adapter: "lldb" # Optional: defaults to lldb-dap + stop_on_entry: true + +steps: + - action: command + command: "break main" + expect: + success: true + + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + - action: inspect_locals + asserts: + - name: "x" + value_contains: "10" +``` + +### Step Types + +| Step Type | Purpose | Fields | +|-----------|---------|--------| +| `command` | Execute debugger command | `command`, `expect` | +| `await` | Wait for stop event | `timeout`, `expect` | +| `inspect_locals` | Verify local variables | `asserts` | +| `inspect_stack` | Verify stack frames | `asserts` | +| `check_output` | Verify program stdout/stderr | `contains`, `equals` | +| `evaluate` | Evaluate expression | `expression`, `expect` | + +### Adapter Field + +Adapter names map to debug backends: + +- `lldb` or omitted - lldb-dap (C, C++, Rust) +- `go` - Delve (Go) +- `python` - debugpy (Python) +- `js-debug` - js-debug (JavaScript, TypeScript) +- `gdb` - GDB 14.1+ or cdt-gdb-adapter (C, C++) + +## Running Tests Locally + +```bash +# Run single scenario +debugger test tests/scenarios/hello_world_c.yml + +# Verbose output +debugger test tests/scenarios/conditional_breakpoint_go.yml --verbose + +# Run with 
specific adapter +debugger test tests/scenarios/hello_world_c.yml --adapter gdb +``` + +## Running Tests in CI + +GitHub Actions runs all scenarios across the adapter/OS matrix: + +- **LLDB**: Ubuntu + macOS (C, Rust) +- **GDB**: Ubuntu + macOS (C) +- **Delve**: Ubuntu + macOS (Go) +- **debugpy**: Ubuntu + macOS (Python) +- **js-debug**: Ubuntu + macOS (JavaScript, TypeScript) + +Tests run on every push and PR. Each adapter gets its own parallel job on both platforms, with graceful fallback when macOS GDB installation fails. + +## Adapter Feature Compatibility + +Not all features work with all adapters. Tests are created only for compatible combinations. + +| Feature | LLDB | GDB | Delve | debugpy | js-debug | +|---------|------|-----|-------|---------|----------| +| Conditional breakpoints | ✅ | ✅ | ✅ | ✅ | ✅ | +| Hit count breakpoints | ✅ | ✅ | ✅ | ❌ | ❌ | +| Thread listing | ✅ | ✅ | ✅ (goroutines) | ✅ | ✅ | +| Stack navigation | ✅ | ✅ | ✅ | ✅ | ✅ | +| Output capture | ✅ | ✅ | ✅ | ✅ | ✅ | +| Pause | ✅ | ✅ | ✅ | ✅ | ✅ | +| Restart | ✅ | ✅ | ✅ | ✅ | ✅ | + +## Writing New Scenarios + +1. Use existing fixtures from `tests/fixtures/` when possible +2. All `await` steps MUST specify `timeout: 10` (or other value) +3. Scenarios MUST end with program termination (`exited` or `terminated` reason) +4. Use BREAKPOINT_MARKERs from fixtures to identify semantic locations +5. For language-specific scenarios, set `adapter:` in target config diff --git a/tests/scenarios/attach_process_c.yml b/tests/scenarios/attach_process_c.yml new file mode 100644 index 0000000..c82e4ba --- /dev/null +++ b/tests/scenarios/attach_process_c.yml @@ -0,0 +1,40 @@ +# Attach Mode Test +# Tests attaching to a running process + +name: "C Attach Process Test" +description: "Verifies debugger can attach to a running process and set breakpoints" + +setup: + # Compile attach target + - shell: "gcc -g tests/fixtures/attach_target.c -o tests/fixtures/test_attach_c" + # Start target in background and capture its PID to a file; the fixture + # self-terminates after 30 seconds, so no external watchdog is needed + # (a `timeout` wrapper would make $! capture the wrapper's PID, not the target's) + - shell: "tests/fixtures/test_attach_c > /tmp/attach_output.txt 2>&1 & echo $! 
> /tmp/attach_pid.txt" + # Wait for target to start and print PID + - shell: "sleep 1" + +target: + # Attach mode uses pid_file field to read PID from setup step + program: "tests/fixtures/test_attach_c" + mode: "attach" + pid_file: "/tmp/attach_pid.txt" + adapter: "lldb" + +steps: + # Set breakpoint in running process + - action: command + command: "break tests/fixtures/attach_target.c:15" + expect: + success: true + + # Continue execution + - action: command + command: "continue" + + # Wait for breakpoint hit + - action: await + timeout: 10 + expect: + reason: "breakpoint" + file: "attach_target.c" + line: 15 diff --git a/tests/scenarios/breakpoint_management_c.yml b/tests/scenarios/breakpoint_management_c.yml new file mode 100644 index 0000000..99c1d43 --- /dev/null +++ b/tests/scenarios/breakpoint_management_c.yml @@ -0,0 +1,55 @@ +# Breakpoint Management Test (C) +# Tests adding, listing, and removing breakpoints + +name: "C Breakpoint Management Test" +description: "Verifies breakpoint list and remove commands work correctly" + +setup: + - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c" + +target: + program: "tests/fixtures/test_simple_c" + args: [] + stop_on_entry: true + +steps: + - action: command + command: "break main" + expect: + success: true + + - action: command + command: "break add" + expect: + success: true + + - action: command + command: "breakpoint list" + expect: + output_contains: "main" + + - action: command + command: "breakpoint list" + expect: + output_contains: "add" + + - action: command + command: "breakpoint remove 1" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "exited" diff --git a/tests/scenarios/conditional_breakpoint_c.yml b/tests/scenarios/conditional_breakpoint_c.yml new file mode 100644 index 0000000..0e8f041 --- /dev/null +++ b/tests/scenarios/conditional_breakpoint_c.yml @@ -0,0 +1,50 @@ +# Conditional Breakpoint Test (C) +# Tests conditional breakpoint functionality + +name: "C Conditional Breakpoint Test" +description: "Verifies conditional breakpoints stop only when condition is true" + +setup: + - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c" + +target: + program: "../fixtures/test_simple_c" + args: [] + stop_on_entry: true + +steps: + # Set conditional breakpoint: only stop when n == 3 + # factorial(5) calls recursively: n=5,4,3,2,1 + # Should only stop once when n=3 + - action: command + command: "break factorial --condition \"n == 3\"" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + # Verify we stopped at n=3 (not n=5 or n=4) + - action: inspect_locals + asserts: + - name: "n" + value_contains: "3" + + # Remove breakpoint and continue to exit + - action: command + command: "breakpoint remove all" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "exited" diff --git a/tests/scenarios/conditional_breakpoint_go.yml b/tests/scenarios/conditional_breakpoint_go.yml new file mode 100644 index 0000000..70ac838 --- /dev/null +++ b/tests/scenarios/conditional_breakpoint_go.yml @@ -0,0 +1,41 @@ +# Conditional Breakpoint Test (Go) +# Tests conditional breakpoint functionality + +name: "Go Conditional Breakpoint Test" +description: "Verifies conditional 
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/conditional_breakpoint_c.yml b/tests/scenarios/conditional_breakpoint_c.yml
new file mode 100644
index 0000000..0e8f041
--- /dev/null
+++ b/tests/scenarios/conditional_breakpoint_c.yml
@@ -0,0 +1,50 @@
+# Conditional Breakpoint Test (C)
+# Tests conditional breakpoint functionality
+
+name: "C Conditional Breakpoint Test"
+description: "Verifies conditional breakpoints stop only when condition is true"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "../fixtures/test_simple_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  # Set conditional breakpoint: only stop when n == 3
+  # factorial(5) calls recursively: n=5,4,3,2,1
+  # Should only stop once when n=3
+  - action: command
+    command: "break factorial --condition \"n == 3\""
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  # Verify we stopped at n=3 (not n=5 or n=4)
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "3"
+
+  # Remove breakpoint and continue to exit
+  - action: command
+    command: "breakpoint remove all"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/conditional_breakpoint_go.yml b/tests/scenarios/conditional_breakpoint_go.yml
new file mode 100644
index 0000000..70ac838
--- /dev/null
+++ b/tests/scenarios/conditional_breakpoint_go.yml
@@ -0,0 +1,41 @@
+# Conditional Breakpoint Test (Go)
+# Tests conditional breakpoint functionality
+
+name: "Go Conditional Breakpoint Test"
+description: "Verifies conditional breakpoints stop only when condition is true"
+
+setup:
+  - shell: "go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go"
+
+target:
+  program: "../fixtures/test_simple_go"
+  args: []
+  adapter: "go"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main.factorial --condition \"n > 3\""
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
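+  # The assert below expects the first stop matching n > 3 to land at
+  # n == 4; this assumes simple.go reaches 4 before 5 (the C fixture,
+  # by contrast, descends n=5,4,3,2,1)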
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/conditional_breakpoint_js.yml b/tests/scenarios/conditional_breakpoint_js.yml
new file mode 100644
index 0000000..b23dd65
--- /dev/null
+++ b/tests/scenarios/conditional_breakpoint_js.yml
@@ -0,0 +1,38 @@
+# Conditional Breakpoint Test (JavaScript)
+# Tests conditional breakpoint functionality
+
+name: "JavaScript Conditional Breakpoint Test"
+description: "Verifies conditional breakpoints stop only when condition is true"
+
+target:
+  program: "tests/fixtures/simple.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial --condition \"n > 3\""
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/conditional_breakpoint_py.yml b/tests/scenarios/conditional_breakpoint_py.yml
new file mode 100644
index 0000000..3e2bd69
--- /dev/null
+++ b/tests/scenarios/conditional_breakpoint_py.yml
@@ -0,0 +1,38 @@
+# Conditional Breakpoint Test (Python)
+# Tests conditional breakpoint functionality
+
+name: "Python Conditional Breakpoint Test"
+description: "Verifies conditional breakpoints stop only when condition is true"
+
+target:
+  program: "tests/fixtures/simple.py"
+  args: []
+  adapter: "python"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial --condition \"n > 3\""
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/error_bad_expression_c.yml b/tests/scenarios/error_bad_expression_c.yml
new file mode 100644
index 0000000..e34e6c8
--- /dev/null
+++ b/tests/scenarios/error_bad_expression_c.yml
@@ -0,0 +1,45 @@
+# Bad Expression Error Test
+# Tests error handling for syntax error in expression evaluation
+
+name: "C Bad Expression Test"
+description: "Verifies error when evaluating malformed expression"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  adapter: "lldb"
+  stop_on_entry: true
+
+steps:
+  # Set breakpoint at main start
+  - action: command
+    command: "break simple.c:19"
+    expect:
+      success: true
+
+  # Continue to breakpoint
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  # Attempt to evaluate an expression with a syntax error (an unbalanced
+  # parenthesis; note "1 + + 2" would be valid C, since the second + is unary).
+  # expect.success: false allows the evaluation to fail gracefully
+  - action: evaluate
+    expression: "(1 + 2"
+    expect:
+      success: false
+      result_contains: "error"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/error_invalid_breakpoint_c.yml b/tests/scenarios/error_invalid_breakpoint_c.yml
new file mode 100644
index 0000000..379b595
--- /dev/null
+++ b/tests/scenarios/error_invalid_breakpoint_c.yml
@@ -0,0 +1,30 @@
+# Invalid Breakpoint Error Test
+# Tests error handling for breakpoint at non-existent location
+
+name: "C Invalid Breakpoint Test"
+description: "Verifies error when setting breakpoint at invalid location"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  adapter: "lldb"
+  stop_on_entry: true
+
+steps:
+  # Attempt to set breakpoint at non-existent file and line
+  # expect.success: false validates command failure
+  - action: command
+    command: "break nonexistent.c:999"
+    expect:
+      success: false
+
+  # Continue to program exit
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/error_undefined_variable_c.yml b/tests/scenarios/error_undefined_variable_c.yml
new file mode 100644
index 0000000..25c3ef7
--- /dev/null
+++ b/tests/scenarios/error_undefined_variable_c.yml
@@ -0,0 +1,45 @@
+# Undefined Variable Error Test
+# Tests error handling when evaluating undefined variable
+
+name: "C Undefined Variable Test"
+description: "Verifies error when evaluating undefined variable"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  adapter: "lldb"
+  stop_on_entry: true
+
+steps:
+  # Set breakpoint at main start
+  - action: command
+    command: "break simple.c:19"
+    expect:
+      success: true
+
+  # Continue to breakpoint
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  # Attempt to evaluate undefined variable
+  # expect.success: false allows the evaluation to fail gracefully
+  - action: evaluate
+    expression: "undefined_xyz"
+    expect:
+      success: false
+      result_contains: "error"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/expression_eval_js.yml b/tests/scenarios/expression_eval_js.yml
new file mode 100644
index 0000000..efa56cb
--- /dev/null
+++ b/tests/scenarios/expression_eval_js.yml
@@ -0,0 +1,80 @@
+# JavaScript Expression Evaluation Test
+# Tests evaluate command for inspecting and computing expressions
+
+name: "JavaScript Expression Evaluation Test"
+description: "Verifies expression evaluation in debug context"
+
+# Debug target configuration
+target:
+  program: "tests/fixtures/simple.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+# Test steps
+steps:
+  # 1. Set breakpoint after variables are initialized (line 30 is after arr)
+  - action: command
+    command: "break simple.js:30"
+    expect:
+      success: true
+
+  # 2. Continue to breakpoint
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 15
+    expect:
+      reason: "breakpoint"
+
+  # 3. Evaluate simple variable
+  - action: command
+    command: "eval x"
+    expect:
+      success: true
+      output_contains: "10"
+
+  # 4. Evaluate expression with arithmetic
+  - action: command
+    command: "eval x + y"
+    expect:
+      success: true
+      output_contains: "30"
+
+  # 5. Evaluate function call expression
+  - action: command
+    command: "eval add(5, 3)"
+    expect:
+      success: true
+      output_contains: "8"
+
+  # 6. Evaluate object property access
+  - action: command
+    command: "eval obj.name"
+    expect:
+      success: true
+      output_contains: "test"
+
+  # 7. Evaluate array access
+  - action: command
+    command: "eval arr[1]"
+    expect:
+      success: true
+      output_contains: "2"
+
+  # 8. Evaluate string method
+  - action: command
+    command: "eval message.toUpperCase()"
+    expect:
+      success: true
+      output_contains: "HELLO"
+
+  # 9. Continue to exit
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/hello_world_js.yml b/tests/scenarios/hello_world_js.yml
new file mode 100644
index 0000000..aa2fbd1
--- /dev/null
+++ b/tests/scenarios/hello_world_js.yml
@@ -0,0 +1,90 @@
+# JavaScript Hello World Test
+# Tests JavaScript debugging with js-debug (VS Code JavaScript Debugger)
+
+name: "JavaScript Hello World Test"
+description: "Verifies basic JavaScript debugging functionality with Node.js"
+
+# Debug target configuration
+target:
+  program: "tests/e2e/hello_world.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+# Test steps
+steps:
+  # 1. Set a breakpoint at line 5 (after x initialization)
+  - action: command
+    command: "break hello_world.js:5"
+    expect:
+      success: true
+
+  # 2. Continue to the breakpoint
+  - action: command
+    command: "continue"
+
+  # 3. Wait for stop at breakpoint
+  - action: await
+    timeout: 15
+    expect:
+      reason: "breakpoint"
+      file: "hello_world.js"
+
+  # 4. Check local variables (x should be initialized)
+  - action: inspect_locals
+    asserts:
+      - name: "x"
+        value_contains: "10"
+
+  # 5. Step over to initialize y
+  - action: command
+    command: "next"
+
+  # 6. Wait for step
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 7. Check y is now visible
+  - action: inspect_locals
+    asserts:
+      - name: "x"
+        value_contains: "10"
+      - name: "y"
+        value_contains: "20"
+
+  # 8. Step over to initialize sum
+  - action: command
+    command: "next"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 9. Verify all variables
+  - action: inspect_locals
+    asserts:
+      - name: "x"
+        value_contains: "10"
+      - name: "y"
+        value_contains: "20"
+      - name: "sum"
+        value_contains: "30"
+
+  # 10. Check stack trace
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "main"
+
+  # 11. Continue to exit
+  - action: command
+    command: "continue"
+
+  # 12. Wait for program exit
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/hello_world_ts.yml b/tests/scenarios/hello_world_ts.yml
new file mode 100644
index 0000000..094c909
--- /dev/null
+++ b/tests/scenarios/hello_world_ts.yml
@@ -0,0 +1,71 @@
+# TypeScript Hello World Test
+# Tests TypeScript debugging with js-debug and sourcemaps
+
+name: "TypeScript Hello World Test"
+description: "Verifies TypeScript debugging with sourcemap support"
+
+# Setup: Compile TypeScript to JavaScript with sourcemaps
+setup:
+  - shell: "cd tests/e2e && npx tsc --outDir dist --sourceMap hello_world.ts || true"
+
+# Debug target configuration
+# Note: We debug the compiled JS but set breakpoints in TS source
+target:
+  program: "tests/e2e/dist/hello_world.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+# Test steps
+steps:
+  # 1. Set a breakpoint in the TypeScript source file
+  - action: command
+    command: "break hello_world.ts:5"
+    expect:
+      success: true
+
+  # 2. Continue to the breakpoint
+  - action: command
+    command: "continue"
+
+  # 3. Wait for stop at breakpoint (should map to TS source)
+  - action: await
+    timeout: 15
+    expect:
+      reason: "breakpoint"
+      # With sourcemaps, we should see the TS file
+      file: "hello_world.ts"
+
+  # 4. Check local variables
+  - action: inspect_locals
+    asserts:
+      - name: "x"
+        value_contains: "10"
+
+  # 5. Step over
+  - action: command
+    command: "next"
+
+  # 6. Wait for step
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 7. Verify variables after stepping
+  - action: inspect_locals
+    asserts:
+      - name: "x"
+        value_contains: "10"
+      - name: "y"
+        value_contains: "20"
+
+  # 8. Continue to exit
+  - action: command
+    command: "continue"
+
+  # 9. Wait for program exit
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/hitcount_breakpoint_c.yml b/tests/scenarios/hitcount_breakpoint_c.yml
new file mode 100644
index 0000000..2299167
--- /dev/null
+++ b/tests/scenarios/hitcount_breakpoint_c.yml
@@ -0,0 +1,40 @@
+# Hit Count Breakpoint Test (C)
+# Tests hit count breakpoint functionality
+
+name: "C Hit Count Breakpoint Test"
+description: "Verifies hit count breakpoints stop after specified number of hits"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial --hit-count 3"
+    expect:
+      success: true
+
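+  # With the C fixture's n=5,4,3,2,1 descent, the third hit is the
+  # factorial(3) call, so the test expects exactly one stop (at n == 3)
+  # before running to exit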
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "factorial"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/hitcount_breakpoint_go.yml b/tests/scenarios/hitcount_breakpoint_go.yml
new file mode 100644
index 0000000..1a6b746
--- /dev/null
+++ b/tests/scenarios/hitcount_breakpoint_go.yml
@@ -0,0 +1,41 @@
+# Hit Count Breakpoint Test (Go)
+# Tests hit count breakpoint functionality
+
+name: "Go Hit Count Breakpoint Test"
+description: "Verifies hit count breakpoints stop after specified number of hits"
+
+setup:
+  - shell: "go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go"
+
+target:
+  program: "../fixtures/test_simple_go"
+  args: []
+  adapter: "go"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main.factorial --hit-count 3"
+    expect:
+      success: true
+
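+  # Same shape as the C variant: a single stop on the third hit of
+  # main.factorial, then run to termination (assuming simple.go recurses
+  # like the C fixture)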
adapter: "js-debug" + stop_on_entry: true + +steps: + - action: command + command: "break main" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "terminated" + + - action: check_output + contains: "Sum:" + + - action: check_output + contains: "Factorial:" + + # Test --tail flag returns only last line + - action: command + command: "output --tail 1" + expect: + output_contains: "Factorial:" diff --git a/tests/scenarios/output_capture_py.yml b/tests/scenarios/output_capture_py.yml new file mode 100644 index 0000000..c8aa36a --- /dev/null +++ b/tests/scenarios/output_capture_py.yml @@ -0,0 +1,45 @@ +# Output Capture Test (Python) +# Tests output capture functionality + +name: "Python Output Capture Test" +description: "Verifies output command captures stdout correctly" + +target: + program: "tests/fixtures/simple.py" + args: [] + adapter: "python" + stop_on_entry: true + +steps: + - action: command + command: "break main" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "exited" + + - action: check_output + contains: "Sum:" + + - action: check_output + contains: "Factorial:" + + # Test --tail flag returns only last line + - action: command + command: "output --tail 1" + expect: + output_contains: "Factorial:" diff --git a/tests/scenarios/pause_resume_c.yml b/tests/scenarios/pause_resume_c.yml new file mode 100644 index 0000000..1828793 --- /dev/null +++ b/tests/scenarios/pause_resume_c.yml @@ -0,0 +1,62 @@ +# Pause/Resume Test (C) +# Tests pause command to stop running program +# Uses factorial breakpoint to ensure program runs long enough for reliable pause + +name: "C Pause/Resume Test" +description: "Verifies pause command stops a running program" + +setup: + - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c" + +target: + program: "tests/fixtures/test_simple_c" + args: [] + stop_on_entry: true + +steps: + # Set breakpoint in factorial to ensure we can pause during computation + - action: command + command: "break factorial" + expect: + success: true + + - action: command + command: "continue" + + # Wait for first factorial hit + - action: await + timeout: 10 + expect: + reason: "breakpoint" + + # Remove breakpoint and continue - program will run through factorial(5) + - action: command + command: "breakpoint remove --all" + + - action: command + command: "continue" + + # Immediately pause - factorial recursion should still be running + - action: command + command: "pause" + expect: + success: true + + - action: await + timeout: 5 + expect: + reason: "pause" + + # Verify we're stopped somewhere in the program + - action: command + command: "backtrace" + expect: + success: true + + - action: command + command: "continue" + + - action: await + timeout: 10 + expect: + reason: "exited" diff --git a/tests/scenarios/program_restart_c.yml b/tests/scenarios/program_restart_c.yml new file mode 100644 index 0000000..2dbc755 --- /dev/null +++ b/tests/scenarios/program_restart_c.yml @@ -0,0 +1,50 @@ +# Program Restart Test (C) +# Tests restart command functionality + +name: "C Program Restart Test" +description: "Verifies restart command restarts program from beginning" + +setup: + - shell: "gcc -g 
diff --git a/tests/scenarios/output_capture_py.yml b/tests/scenarios/output_capture_py.yml
new file mode 100644
index 0000000..c8aa36a
--- /dev/null
+++ b/tests/scenarios/output_capture_py.yml
@@ -0,0 +1,45 @@
+# Output Capture Test (Python)
+# Tests output capture functionality
+
+name: "Python Output Capture Test"
+description: "Verifies output command captures stdout correctly"
+
+target:
+  program: "tests/fixtures/simple.py"
+  args: []
+  adapter: "python"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
+
+  - action: check_output
+    contains: "Sum:"
+
+  - action: check_output
+    contains: "Factorial:"
+
+  # Test --tail flag returns only last line
+  - action: command
+    command: "output --tail 1"
+    expect:
+      output_contains: "Factorial:"
diff --git a/tests/scenarios/pause_resume_c.yml b/tests/scenarios/pause_resume_c.yml
new file mode 100644
index 0000000..1828793
--- /dev/null
+++ b/tests/scenarios/pause_resume_c.yml
@@ -0,0 +1,62 @@
+# Pause/Resume Test (C)
+# Tests pause command to stop running program
+# Uses factorial breakpoint to ensure program runs long enough for reliable pause
+
+name: "C Pause/Resume Test"
+description: "Verifies pause command stops a running program"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  # Set breakpoint in factorial to ensure we can pause during computation
+  - action: command
+    command: "break factorial"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  # Wait for first factorial hit
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  # Remove breakpoint and continue - program will run through factorial(5)
+  # (spelled "remove all" to match the other scenarios in this suite)
+  - action: command
+    command: "breakpoint remove all"
+
+  - action: command
+    command: "continue"
+
+  # Immediately pause - factorial recursion should still be running
+  - action: command
+    command: "pause"
+    expect:
+      success: true
+
+  - action: await
+    timeout: 5
+    expect:
+      reason: "pause"
+
+  # Verify we're stopped somewhere in the program
+  - action: command
+    command: "backtrace"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/program_restart_c.yml b/tests/scenarios/program_restart_c.yml
new file mode 100644
index 0000000..2dbc755
--- /dev/null
+++ b/tests/scenarios/program_restart_c.yml
@@ -0,0 +1,50 @@
+# Program Restart Test (C)
+# Tests restart command functionality
+
+name: "C Program Restart Test"
+description: "Verifies restart command restarts program from beginning"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "tests/fixtures/test_simple_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "restart"
+    expect:
+      success: true
+
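+  # The main breakpoint is expected to survive the restart: the await
+  # below requires a second stop at it from a fresh program start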
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "main"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/program_restart_go.yml b/tests/scenarios/program_restart_go.yml
new file mode 100644
index 0000000..517daf2
--- /dev/null
+++ b/tests/scenarios/program_restart_go.yml
@@ -0,0 +1,51 @@
+# Program Restart Test (Go)
+# Tests restart command functionality
+
+name: "Go Program Restart Test"
+description: "Verifies restart command restarts program from beginning"
+
+setup:
+  - shell: "go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go"
+
+target:
+  program: "../fixtures/test_simple_go"
+  args: []
+  adapter: "go"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main.main"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "restart"
+    expect:
+      success: true
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "main.main"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/stack_navigation_c.yml b/tests/scenarios/stack_navigation_c.yml
new file mode 100644
index 0000000..8cd67b7
--- /dev/null
+++ b/tests/scenarios/stack_navigation_c.yml
@@ -0,0 +1,83 @@
+# Stack Navigation Test (C)
+# Tests frame navigation commands (frame, up, down)
+
+name: "C Stack Navigation Test"
+description: "Verifies stack frame navigation works correctly"
+
+setup:
+  - shell: "gcc -g tests/fixtures/simple.c -o tests/fixtures/test_simple_c"
+
+target:
+  program: "../fixtures/test_simple_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
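+  # Three stops walk the recursion down n=5, n=4, n=3; at this point
+  # frame 1 is the factorial(4) caller, which the locals check below
+  # relies on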
+  - action: command
+    command: "backtrace"
+    expect:
+      output_contains: "factorial"
+
+  - action: command
+    command: "frame 1"
+    expect:
+      success: true
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "up"
+    expect:
+      success: true
+
+  - action: command
+    command: "down"
+    expect:
+      success: true
+
+  # Delete the breakpoint before continuing to avoid hitting it more times
+  # (factorial is recursive and will hit the breakpoint 5 times total)
+  - action: command
+    command: "breakpoint remove all"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/stack_navigation_go.yml b/tests/scenarios/stack_navigation_go.yml
new file mode 100644
index 0000000..45b16db
--- /dev/null
+++ b/tests/scenarios/stack_navigation_go.yml
@@ -0,0 +1,77 @@
+# Stack Navigation Test (Go)
+# Tests frame navigation commands (frame, up, down)
+
+name: "Go Stack Navigation Test"
+description: "Verifies stack frame navigation works correctly"
+
+setup:
+  - shell: "go build -gcflags='all=-N -l' -o tests/fixtures/test_simple_go tests/fixtures/simple.go"
+
+target:
+  program: "../fixtures/test_simple_go"
+  args: []
+  adapter: "go"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main.factorial"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "backtrace"
+    expect:
+      output_contains: "factorial"
+
+  - action: command
+    command: "frame 1"
+    expect:
+      success: true
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "up"
+    expect:
+      success: true
+
+  - action: command
+    command: "down"
+    expect:
+      success: true
+
+  # As in the C variant, remove the breakpoint before the final continue:
+  # the frame-1 check above implies factorial recurses past this stop
+  - action: command
+    command: "breakpoint remove all"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/stack_navigation_js.yml b/tests/scenarios/stack_navigation_js.yml
new file mode 100644
index 0000000..bc4a89b
--- /dev/null
+++ b/tests/scenarios/stack_navigation_js.yml
@@ -0,0 +1,74 @@
+# Stack Navigation Test (JavaScript)
+# Tests frame navigation commands (frame, up, down)
+
+name: "JavaScript Stack Navigation Test"
+description: "Verifies stack frame navigation works correctly"
+
+target:
+  program: "tests/fixtures/simple.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "backtrace"
+    expect:
+      output_contains: "factorial"
+
+  - action: command
+    command: "frame 1"
+    expect:
+      success: true
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "up"
+    expect:
+      success: true
+
+  - action: command
+    command: "down"
+    expect:
+      success: true
+
+  # As in the C variant, remove the breakpoint before the final continue:
+  # the frame-1 check above implies factorial recurses past this stop
+  - action: command
+    command: "breakpoint remove all"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/stack_navigation_py.yml b/tests/scenarios/stack_navigation_py.yml
new file mode 100644
index 0000000..a5627c2
--- /dev/null
+++ b/tests/scenarios/stack_navigation_py.yml
@@ -0,0 +1,74 @@
+# Stack Navigation Test (Python)
+# Tests frame navigation commands (frame, up, down)
+
+name: "Python Stack Navigation Test"
+description: "Verifies stack frame navigation works correctly"
+
+target:
+  program: "tests/fixtures/simple.py"
+  args: []
+  adapter: "python"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break factorial"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "backtrace"
+    expect:
+      output_contains: "factorial"
+
+  - action: command
+    command: "frame 1"
+    expect:
+      success: true
+
+  - action: inspect_locals
+    asserts:
+      - name: "n"
+        value_contains: "4"
+
+  - action: command
+    command: "up"
+    expect:
+      success: true
+
+  - action: command
+    command: "down"
+    expect:
+      success: true
+
+  # As in the C variant, remove the breakpoint before the final continue:
+  # the frame-1 check above implies factorial recurses past this stop
+  - action: command
+    command: "breakpoint remove all"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/stepping_js.yml b/tests/scenarios/stepping_js.yml
new file mode 100644
index 0000000..30de719
--- /dev/null
+++ b/tests/scenarios/stepping_js.yml
@@ -0,0 +1,84 @@
+# JavaScript Stepping Test
+# Tests step-in, step-out, and step-over functionality
+
+name: "JavaScript Stepping Test"
+description: "Verifies stepping commands work correctly in JavaScript"
+
+# Debug target configuration
+target:
+  program: "tests/fixtures/simple.js"
+  args: []
+  adapter: "js-debug"
+  stop_on_entry: true
+
+# Test steps
+steps:
+  # 1. Set breakpoint at function call (line with add(x, y))
+  - action: command
+    command: "break simple.js:31"
+    expect:
+      success: true
+
+  # 2. Continue to breakpoint
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 15
+    expect:
+      reason: "breakpoint"
+      file: "simple.js"
+
+  # 3. Step into the function
+  - action: command
+    command: "step"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 4. Verify we're inside the add function
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "add"
+
+  # 5. Step out back to main
+  - action: command
+    command: "finish"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 6. Verify we're back in main
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "main"
+
+  # 7. Step over (should not enter multiply)
+  - action: command
+    command: "next"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "step"
+
+  # 8. Verify we stayed in main (did not step into multiply)
+  - action: inspect_stack
+    asserts:
+      - index: 0
+        function: "main"
+
+  # 9. Continue to exit
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "terminated"
diff --git a/tests/scenarios/thread_list_c.yml b/tests/scenarios/thread_list_c.yml
new file mode 100644
index 0000000..eae8e64
--- /dev/null
+++ b/tests/scenarios/thread_list_c.yml
@@ -0,0 +1,41 @@
+# Thread List Test (C)
+# Tests thread listing functionality with multithreaded program
+
+name: "C Thread List Test"
+description: "Verifies thread list command shows multiple threads"
+
+setup:
+  - shell: "gcc -g -pthread tests/fixtures/threaded.c -o tests/fixtures/test_threaded_c"
+
+target:
+  program: "tests/fixtures/test_threaded_c"
+  args: []
+  stop_on_entry: true
+
+steps:
+  # Break at worker_body (AFTER barrier) to avoid deadlock - barrier needs all 3 threads
+  - action: command
+    command: "break worker_body"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "threads"
+    expect:
+      output_contains: "Thread"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 15
+    expect:
+      reason: "exited"
diff --git a/tests/scenarios/thread_list_go.yml b/tests/scenarios/thread_list_go.yml
new file mode 100644
index 0000000..47f5131
--- /dev/null
+++ b/tests/scenarios/thread_list_go.yml
@@ -0,0 +1,41 @@
+# Thread List Test (Go)
+# Tests goroutine listing functionality
+
+name: "Go Thread List Test"
+description: "Verifies thread list command shows goroutines"
+
+setup:
+  - shell: "go build -gcflags='all=-N -l' -o tests/fixtures/test_threaded_go tests/fixtures/threaded.go"
+
+target:
+  program: "../fixtures/test_threaded_go"
+  args: []
+  adapter: "go"
+  stop_on_entry: true
+
+steps:
+  - action: command
+    command: "break main.worker"
+    expect:
+      success: true
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 10
+    expect:
+      reason: "breakpoint"
+
+  - action: command
+    command: "threads"
+    expect:
+      output_contains: "Goroutine"
+
+  - action: command
+    command: "continue"
+
+  - action: await
+    timeout: 15
+    expect:
+      reason: "terminated"