diff --git a/.ci/Dockerfile.cypress b/.ci/Dockerfile.cypress new file mode 100644 index 0000000000..e595fcc1ba --- /dev/null +++ b/.ci/Dockerfile.cypress @@ -0,0 +1,12 @@ +FROM cypress/browsers:node18.12.0-chrome106-ff106 + +ENV APP /usr/src/app +WORKDIR $APP + +COPY package.json yarn.lock .yarnrc $APP/ +COPY viz-lib $APP/viz-lib +RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null + +COPY . $APP + +RUN ./node_modules/.bin/cypress verify diff --git a/.ci/compose.ci.yaml b/.ci/compose.ci.yaml new file mode 100644 index 0000000000..984bf7b123 --- /dev/null +++ b/.ci/compose.ci.yaml @@ -0,0 +1,25 @@ +services: + redash: + build: ../ + command: manage version + depends_on: + - postgres + - redis + ports: + - "5000:5000" + environment: + PYTHONUNBUFFERED: 0 + REDASH_LOG_LEVEL: "INFO" + REDASH_REDIS_URL: "redis://redis:6379/0" + POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb" + REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres" + REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF" + redis: + image: redis:7-alpine + restart: unless-stopped + postgres: + image: postgres:18-alpine + command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF" + restart: unless-stopped + environment: + POSTGRES_HOST_AUTH_METHOD: "trust" diff --git a/.ci/compose.cypress.yaml b/.ci/compose.cypress.yaml new file mode 100644 index 0000000000..ba95f53fec --- /dev/null +++ b/.ci/compose.cypress.yaml @@ -0,0 +1,73 @@ +x-redash-service: &redash-service + build: + context: ../ + args: + install_groups: "main" + code_coverage: ${CODE_COVERAGE} +x-redash-environment: &redash-environment + REDASH_LOG_LEVEL: "INFO" + REDASH_REDIS_URL: "redis://redis:6379/0" + POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb" + REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres" + REDASH_RATELIMIT_ENABLED: "false" + REDASH_ENFORCE_CSRF: "true" + REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF" +services: + server: + <<: *redash-service + command: server + depends_on: + - postgres + - redis + ports: + - "5000:5000" + environment: + <<: *redash-environment + PYTHONUNBUFFERED: 0 + scheduler: + <<: *redash-service + command: scheduler + depends_on: + - server + environment: + <<: *redash-environment + worker: + <<: *redash-service + command: worker + depends_on: + - server + environment: + <<: *redash-environment + PYTHONUNBUFFERED: 0 + cypress: + ipc: host + build: + context: ../ + dockerfile: .ci/Dockerfile.cypress + depends_on: + - server + - worker + - scheduler + environment: + CYPRESS_baseUrl: "http://server:5000" + CYPRESS_coverage: ${CODE_COVERAGE} + PERCY_TOKEN: ${PERCY_TOKEN} + PERCY_BRANCH: ${CIRCLE_BRANCH} + PERCY_COMMIT: ${CIRCLE_SHA1} + PERCY_PULL_REQUEST: ${CIRCLE_PR_NUMBER} + COMMIT_INFO_BRANCH: ${CIRCLE_BRANCH} + COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE} + COMMIT_INFO_AUTHOR: ${CIRCLE_USERNAME} + COMMIT_INFO_SHA: ${CIRCLE_SHA1} + COMMIT_INFO_REMOTE: ${CIRCLE_REPOSITORY_URL} + CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID} + CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY} + redis: + image: redis:7-alpine + restart: unless-stopped + postgres: + image: postgres:18-alpine + command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF" + restart: unless-stopped + environment: + POSTGRES_HOST_AUTH_METHOD: "trust" diff --git a/.ci/docker_build b/.ci/docker_build new file mode 100755 index 0000000000..324c7e996e --- /dev/null +++ b/.ci/docker_build @@ -0,0 
+1,39 @@ +#!/bin/bash + +# This script only needs to run on the main Redash repo + +if [ "${GITHUB_REPOSITORY}" != "getredash/redash" ]; then + echo "Skipping image build for Docker Hub, as this isn't the main Redash repository" + exit 0 +fi + +if [ "${GITHUB_REF_NAME}" != "master" ] && [ "${GITHUB_REF_NAME}" != "preview-image" ]; then + echo "Skipping image build for Docker Hub, as this isn't the 'master' nor 'preview-image' branch" + exit 0 +fi + +if [ "x${DOCKER_USER}" = "x" ] || [ "x${DOCKER_PASS}" = "x" ]; then + echo "Skipping image build for Docker Hub, as the login details aren't available" + exit 0 +fi + +set -e +VERSION=$(jq -r .version package.json) +VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}" + +export DOCKER_BUILDKIT=1 +export COMPOSE_DOCKER_CLI_BUILD=1 + +docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}" + +DOCKERHUB_REPO="redash/redash" +DOCKER_TAGS="-t redash/redash:preview -t redash/preview:${VERSION_TAG}" + +# Build the docker container +docker build --build-arg install_groups="main,all_ds,dev" ${DOCKER_TAGS} . + +# Push the container to the preview build locations +docker push "${DOCKERHUB_REPO}:preview" +docker push "redash/preview:${VERSION_TAG}" + +echo "Built: ${VERSION_TAG}" diff --git a/.ci/pack b/.ci/pack new file mode 100755 index 0000000000..16223c5a9b --- /dev/null +++ b/.ci/pack @@ -0,0 +1,9 @@ +#!/bin/bash +NAME=redash +VERSION=$(jq -r .version package.json) +FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM +FILENAME=$NAME.$FULL_VERSION.tar.gz + +mkdir -p /tmp/artifacts/ + +tar -zcv -f /tmp/artifacts/$FILENAME --exclude=".git" --exclude="optipng*" --exclude="cypress" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" * diff --git a/.ci/update_version b/.ci/update_version new file mode 100755 index 0000000000..53b537208c --- /dev/null +++ b/.ci/update_version @@ -0,0 +1,6 @@ +#!/bin/bash +VERSION=$(jq -r .version package.json) +FULL_VERSION=${VERSION}+b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER} + +sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py +sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json diff --git a/.circleci/config.yml b/.circleci/config.yml index 8845e400e9..0a543d1ad0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,17 +3,18 @@ version: 2.0 jobs: backend-lint: docker: - - image: circleci/python:3.7.0 + - image: cimg/python:3.8 steps: - checkout - - run: sudo pip install flake8 - - run: ./bin/flake8_tests.sh + - run: pip3 install black==23.1.0 ruff==0.0.287 + - run: ruff check . + - run: black --check . 
backend-unit-tests: environment: COMPOSE_FILE: .circleci/docker-compose.circle.yml COMPOSE_PROJECT_NAME: redash docker: - - image: circleci/buildpack-deps:xenial + - image: cimg/base:2025.04 steps: - setup_remote_docker - checkout @@ -21,18 +22,18 @@ jobs: name: Build Docker Images command: | set -x - docker-compose build --build-arg skip_ds_deps=true --build-arg skip_frontend_build=true - docker-compose up -d + docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true + docker compose up -d sleep 10 - run: name: Create Test Database - command: docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests;" + command: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;" - run: name: List Enabled Query Runners - command: docker-compose run --rm redash manage ds list_types + command: docker compose -p redash run --rm redash manage ds list_types - run: name: Run Tests - command: docker-compose run --name tests redash tests --junitxml=junit.xml --cov-report xml --cov=redash --cov-config .coveragerc tests/ + command: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/ - run: name: Copy Test Results command: | @@ -46,56 +47,102 @@ jobs: path: coverage.xml frontend-lint: docker: - - image: cimg/node:12.22.8 + - image: cimg/node:18.20 steps: - checkout - run: mkdir -p /tmp/test-results/eslint - - run: npm ci - - run: npm run lint:ci + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - run: yarn lint:ci - store_test_results: path: /tmp/test-results frontend-unit-tests: docker: - - image: cimg/node:12.22.8 + - image: cimg/node:18.20 steps: - checkout - - run: sudo apt update - - run: sudo apt install python3-pip - - run: sudo pip3 install -r requirements_bundles.txt - - run: npm ci - - run: npm run bundle + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 - run: name: Run App Tests - command: npm test + command: yarn test - run: name: Run Visualizations Tests - command: (cd viz-lib && npm test) - - run: npm run lint + command: (cd viz-lib && yarn test) + - run: yarn lint + + frontend-e2e-tests: + environment: + COMPOSE_FILE: .ci/compose.cypress.yaml + COMPOSE_PROJECT_NAME: cypress + CYPRESS_INSTALL_BINARY: 0 + PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1 + # PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} + # CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }} + # CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} + docker: + - image: cimg/node:18.20 + steps: + - checkout + - run: + name: Install Dependencies + command: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + # - name: Enable Code Coverage Report For Master Branch + # if: endsWith(github.ref, '/master') + # run: | + # echo "CODE_COVERAGE=true" >> "$GITHUB_ENV" + - run: + name: Setup Redash Server + command: | + set -x + yarn cypress build + yarn cypress start -- --skip-db-seed + docker compose run cypress yarn cypress db-seed + - run: + name: Execute Cypress Tests + command: yarn cypress run-ci + # - name: "Failure: output container logs to console" + # if: failure() + # run: docker compose logs + - run: + name: Copy Code Coverage Results + command: docker cp cypress:/usr/src/app/coverage ./coverage || true + # - name: Store Coverage
Results + # uses: actions/upload-artifact@v4 + # with: + # name: coverage + # path: coverage build-docker-image: docker: - - image: cimg/node:12.22.8 + - image: cimg/node:18.20 steps: - setup_remote_docker - checkout - run: echo "export MOZILLA_VERSION=master" >> $BASH_ENV - - run: sudo apt update - - run: sudo apt install python3-pip - - run: sudo pip3 install -r requirements_bundles.txt + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 - run: .circleci/update_version - - run: npm run bundle + # Bundle extensions: + # - run: npm run bundle - run: .circleci/docker_build build-docker-image-tag: docker: - - image: cimg/node:12.22.8 + - image: cimg/node:18.20 steps: - setup_remote_docker - checkout - run: echo "export MOZILLA_VERSION=$CIRCLE_TAG" >> $BASH_ENV - - run: sudo apt update - - run: sudo apt install python3-pip - - run: sudo pip3 install -r requirements_bundles.txt + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 - run: .circleci/update_version - - run: npm run bundle + # Bundle extensions: + # - run: npm run bundle - run: .circleci/docker_build # Create alias from tag to "latest": - run: docker tag $DOCKERHUB_REPO:$CIRCLE_TAG $DOCKERHUB_REPO:latest diff --git a/.circleci/docker-compose.circle.yml b/.circleci/docker-compose.circle.yml index e756a92ff3..89837364bd 100644 --- a/.circleci/docker-compose.circle.yml +++ b/.circleci/docker-compose.circle.yml @@ -12,11 +12,16 @@ services: PYTHONUNBUFFERED: 0 REDASH_LOG_LEVEL: "INFO" REDASH_REDIS_URL: "redis://redis:6379/0" + # POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb" REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres" + # REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres" + REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF" redis: - image: redis:3.0-alpine + image: redis:7-alpine restart: unless-stopped postgres: - image: postgres:9.5.6-alpine + image: postgres:18-alpine command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF" restart: unless-stopped + environment: + POSTGRES_HOST_AUTH_METHOD: "trust" diff --git a/.dockerignore b/.dockerignore index 8e3dfae173..b5a2c33ebb 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,4 @@ client/.tmp/ -client/dist/ node_modules/ viz-lib/node_modules/ .tmp/ diff --git a/.github/ISSUE_TEMPLATE/---bug_report.md b/.github/ISSUE_TEMPLATE/---bug_report.md index f376d6f1ce..1399ef7791 100644 --- a/.github/ISSUE_TEMPLATE/---bug_report.md +++ b/.github/ISSUE_TEMPLATE/---bug_report.md @@ -7,10 +7,10 @@ about: Report reproducible software issues so we can improve We use GitHub only for bug reports 🐛 -Anything else should be posted to https://discuss.redash.io đŸ‘« +Anything else should be a discussion: https://github.com/getredash/redash/discussions/ đŸ‘« -🚹For support, help & questions use https://discuss.redash.io/c/support -💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests +🚹For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a +💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas **Found a security vulnerability?** Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress.
If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use this PGP key. diff --git a/.github/ISSUE_TEMPLATE/--anything_else.md b/.github/ISSUE_TEMPLATE/--anything_else.md index 9db411b781..d6886cc4ce 100644 --- a/.github/ISSUE_TEMPLATE/--anything_else.md +++ b/.github/ISSUE_TEMPLATE/--anything_else.md @@ -1,17 +1,17 @@ --- name: "\U0001F4A1Anything else" -about: "For help, support, features & ideas - please use https://discuss.redash.io \U0001F46B " +about: "For help, support, features & ideas - please use Discussions \U0001F46B " labels: "Support Question" --- We use GitHub only for bug reports 🐛 -Anything else should be posted to https://discuss.redash.io đŸ‘« +Anything else should be a discussion: https://github.com/getredash/redash/discussions/ đŸ‘« -🚹For support, help & questions use https://discuss.redash.io/c/support -💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests +🚹For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a +💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas Alternatively, check out these resources below. Thanks! 😁. -- [Forum](https://disucss.redash.io) +- [Discussions](https://github.com/getredash/redash/discussions/) - [Knowledge Base](https://redash.io/help) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index a4e1d25210..8b6e58a6f2 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,15 +1,26 @@ -## What type of PR is this? (check all applicable) - +## What type of PR is this? + - [ ] Refactor - [ ] Feature - [ ] Bug Fix -- [ ] New Query Runner (Data Source) +- [ ] New Query Runner (Data Source) - [ ] New Alert Destination - [ ] Other ## Description + + +## How is this tested? + +- [ ] Unit tests (pytest, jest) +- [ ] E2E Tests (Cypress) +- [ ] Manually +- [ ] N/A + + ## Related Tickets & Documents + ## Mobile & Desktop Screenshots/Recordings (if there are UI changes) diff --git a/.github/support.yml b/.github/support.yml deleted file mode 100644 index 164b588b36..0000000000 --- a/.github/support.yml +++ /dev/null @@ -1,23 +0,0 @@ -# Configuration for Support Requests - https://github.com/dessant/support-requests - -# Label used to mark issues as support requests -supportLabel: Support Question - -# Comment to post on issues marked as support requests, `{issue-author}` is an -# optional placeholder. Set to `false` to disable -supportComment: > - :wave: @{issue-author}, we use the issue tracker exclusively for bug reports - and planned work. However, this issue appears to be a support request. - Please use [our forum](https://discuss.redash.io) to get help. - -# Close issues marked as support requests -close: true - -# Lock issues marked as support requests -lock: false - -# Assign `off-topic` as the reason for locking. 
Set to `false` to disable -setLockReason: true - -# Repository to extend settings from -# _extends: repo diff --git a/.github/workflows/build-image-tag.yml b/.github/workflows/build-image-tag.yml new file mode 100644 index 0000000000..7f5069d820 --- /dev/null +++ b/.github/workflows/build-image-tag.yml @@ -0,0 +1,32 @@ +name: Build docker image tag + +on: + push: + branches-ignore: + - '**' + tags: + # GHA doesn't support regex here so less precise tag matching will have to do + - 'm[0-9]+*' + +jobs: + build-docker-image-tag: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: 18 + cache: 'yarn' + - run: echo "MOZILLA_VERSION=${GITHUB_REF_NAME}" >> "$GITHUB_ENV" + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - run: .ci/update_version + # Bundle extensions: + # - run: npm run bundle + - run: .ci/docker_build + # Create alias from tag to "latest": + - run: docker tag $DOCKERHUB_REPO:$GITHUB_REF_NAME $DOCKERHUB_REPO:latest + - run: docker push $DOCKERHUB_REPO:latest \ No newline at end of file diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-image.yml new file mode 100644 index 0000000000..3292903946 --- /dev/null +++ b/.github/workflows/build-image.yml @@ -0,0 +1,26 @@ +name: Build docker image + +on: + push: + branches: + - master + +jobs: + build-docker-image: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: 18 + cache: 'yarn' + - run: echo "MOZILLA_VERSION=master" >> "$GITHUB_ENV" + - run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - run: .ci/update_version + # Bundle extensions: + # - run: npm run bundle + - run: .ci/docker_build \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..1cee14f8ab --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,177 @@ +name: Tests +on: + push: + branches: + - master + pull_request: + branches: + - master +env: + NODE_VERSION: 18 + YARN_VERSION: 1.22.22 +jobs: + backend-lint: + runs-on: ubuntu-22.04 + steps: + - if: github.event.pull_request.mergeable == 'false' + name: Exit if PR is not mergeable + run: exit 1 + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-python@v5 + with: + python-version: '3.8' + - run: sudo pip install black==23.1.0 ruff==0.0.287 + - run: ruff check . + - run: black --check .
+ + backend-unit-tests: + runs-on: ubuntu-22.04 + needs: backend-lint + env: + COMPOSE_FILE: .ci/compose.ci.yaml + COMPOSE_PROJECT_NAME: redash + COMPOSE_DOCKER_CLI_BUILD: 1 + DOCKER_BUILDKIT: 1 + steps: + - if: github.event.pull_request.mergeable == 'false' + name: Exit if PR is not mergeable + run: exit 1 + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.pull_request.head.sha }} + - name: Build Docker Images + run: | + set -x + docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true + docker compose up -d + sleep 10 + - name: Create Test Database + run: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;" + - name: List Enabled Query Runners + run: docker compose -p redash run --rm redash manage ds list_types + - name: Run Tests + run: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/ + - name: Copy Test Results + run: | + mkdir -p /tmp/test-results/unit-tests + docker cp tests:/app/coverage.xml ./coverage.xml + docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml + # - name: Upload coverage reports to Codecov + # uses: codecov/codecov-action@v3 + # with: + # token: ${{ secrets.CODECOV_TOKEN }} + - name: Store Test Results + uses: actions/upload-artifact@v4 + with: + name: backend-test-results + path: /tmp/test-results + - name: Store Coverage Results + uses: actions/upload-artifact@v4 + with: + name: coverage + path: coverage.xml + + frontend-lint: + runs-on: ubuntu-22.04 + steps: + - if: github.event.pull_request.mergeable == 'false' + name: Exit if PR is not mergeable + run: exit 1 + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'yarn' + - name: Install Dependencies + run: | + npm install --global --force yarn@$YARN_VERSION + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - name: Run Lint + run: yarn lint:ci + - name: Store Test Results + uses: actions/upload-artifact@v4 + with: + name: frontend-test-results + path: /tmp/test-results + + frontend-unit-tests: + runs-on: ubuntu-22.04 + needs: frontend-lint + steps: + - if: github.event.pull_request.mergeable == 'false' + name: Exit if PR is not mergeable + run: exit 1 + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'yarn' + - name: Install Dependencies + run: | + npm install --global --force yarn@$YARN_VERSION + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - name: Run App Tests + run: yarn test + - name: Run Visualizations Tests + run: cd viz-lib && yarn test + - run: yarn lint + + frontend-e2e-tests: + runs-on: ubuntu-22.04 + needs: frontend-lint + env: + COMPOSE_FILE: .ci/compose.cypress.yaml + COMPOSE_PROJECT_NAME: cypress + CYPRESS_INSTALL_BINARY: 0 + PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1 + # PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }} + # CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }} + # CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} + steps: + - if: github.event.pull_request.mergeable == 'false' + name: Exit if PR is not mergeable + run: exit 1 + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.pull_request.head.sha }} + - uses: 
actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'yarn' + - name: Enable Code Coverage Report For Master Branch + if: endsWith(github.ref, '/master') + run: | + echo "CODE_COVERAGE=true" >> "$GITHUB_ENV" + - name: Install Dependencies + run: | + npm install --global --force yarn@$YARN_VERSION + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + - name: Setup Redash Server + run: | + set -x + yarn cypress build + yarn cypress start -- --skip-db-seed + docker compose run cypress yarn cypress db-seed + - name: Execute Cypress Tests + run: yarn cypress run-ci + - name: "Failure: output container logs to console" + if: failure() + run: docker compose logs + - name: Copy Code Coverage Results + run: docker cp cypress:/usr/src/app/coverage ./coverage || true + - name: Store Coverage Results + uses: actions/upload-artifact@v4 + with: + name: coverage + path: coverage diff --git a/.github/workflows/periodic-snapshot.yml b/.github/workflows/periodic-snapshot.yml new file mode 100644 index 0000000000..2925227912 --- /dev/null +++ b/.github/workflows/periodic-snapshot.yml @@ -0,0 +1,87 @@ +name: Periodic Snapshot + +on: + # DISABLED + # schedule: + # - cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month + # workflow_dispatch: + # inputs: + # bump: + # description: 'Bump the last digit of the version' + # required: false + # type: boolean + # version: + # description: 'Specific version to set' + # required: false + # default: '' + +env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +permissions: + actions: write + contents: write + +jobs: + bump-version-and-tag: + runs-on: ubuntu-latest + if: github.ref_name == github.event.repository.default_branch + steps: + - uses: actions/checkout@v4 + with: + ssh-key: ${{ secrets.ACTION_PUSH_KEY }} + + - run: | + git config user.name 'github-actions[bot]' + git config user.email '41898282+github-actions[bot]@users.noreply.github.com' + + # Function to bump the version + bump_version() { + local version="$1" + local IFS=. + read -r major minor patch <<< "$version" + patch=$((patch + 1)) + echo "$major.$minor.$patch-dev" + } + + # Determine the new version tag + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + BUMP_INPUT="${{ github.event.inputs.bump }}" + SPECIFIC_VERSION="${{ github.event.inputs.version }}" + + # Check if both bump and specific version are provided + if [ "$BUMP_INPUT" = "true" ] && [ -n "$SPECIFIC_VERSION" ]; then + echo "::error::Error: Cannot specify both bump and specific version." + exit 1 + fi + + if [ -n "$SPECIFIC_VERSION" ]; then + TAG_NAME="$SPECIFIC_VERSION-dev" + elif [ "$BUMP_INPUT" = "true" ]; then + CURRENT_VERSION=$(grep '"version":' package.json | awk -F\" '{print $4}') + TAG_NAME=$(bump_version "$CURRENT_VERSION") + else + echo "No version bump or specific version provided for manual dispatch." 
+ exit 1 + fi + else + TAG_NAME="$(date +%y.%m).0-dev" + fi + + echo "New version tag: $TAG_NAME" + + # Update version in files + gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json + gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py + gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml + + git add package.json redash/__init__.py pyproject.toml + git commit -m "Snapshot: ${TAG_NAME}" + git tag ${TAG_NAME} + git push --atomic origin master refs/tags/${TAG_NAME} + + # Run the 'preview-image' workflow if run this workflow manually + # For more information, please see the: https://docs.github.com/en/actions/security-guides/automatic-token-authentication + if [ "$BUMP_INPUT" = "true" ] || [ -n "$SPECIFIC_VERSION" ]; then + gh workflow run preview-image.yml --ref $TAG_NAME + fi diff --git a/.github/workflows/preview-image.yml b/.github/workflows/preview-image.yml new file mode 100644 index 0000000000..24269d5608 --- /dev/null +++ b/.github/workflows/preview-image.yml @@ -0,0 +1,185 @@ +name: Preview Image +on: + push: + tags: + - '*-dev' + workflow_dispatch: + inputs: + dockerRepository: + description: 'Docker repository' + required: true + default: 'preview' + type: choice + options: + - preview + - redash + +env: + NODE_VERSION: 18 + +jobs: + build-skip-check: + runs-on: ubuntu-22.04 + outputs: + skip: ${{ steps.skip-check.outputs.skip }} + steps: + - name: Skip? + id: skip-check + run: | + if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then + echo 'Docker user is empty. Skipping build+push' + echo skip=true >> "$GITHUB_OUTPUT" + elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then + echo 'Docker password is empty. Skipping build+push' + echo skip=true >> "$GITHUB_OUTPUT" + elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then + echo 'Docker repository is empty. Skipping build+push' + echo skip=true >> "$GITHUB_OUTPUT" + else + echo 'Docker user and password are set and branch is `master`.' + echo 'Building + pushing `preview` image.' 
+ echo skip=false >> "$GITHUB_OUTPUT" + fi + + build-docker-image: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + arch: + - amd64 + - arm64 + include: + - arch: amd64 + os: ubuntu-22.04 + - arch: arm64 + os: ubuntu-22.04-arm + outputs: + VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }} + needs: + - build-skip-check + if: needs.build-skip-check.outputs.skip == 'false' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 1 + ref: ${{ github.event.push.after }} + + - uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'yarn' + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_PASS }} + + - name: Install Dependencies + env: + PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true + run: | + npm install --global --force yarn@1.22.22 + yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 + + - name: Set version + id: version + run: | + set -x + .ci/update_version + VERSION_TAG=$(jq -r .version package.json) + echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT" + + - name: Build and push preview image to Docker Hub + id: build-preview + uses: docker/build-push-action@v4 + if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }} + with: + tags: | + ${{ vars.DOCKER_REPOSITORY }}/redash + ${{ vars.DOCKER_REPOSITORY }}/preview + context: . + build-args: | + test_all_deps=true + outputs: type=image,push-by-digest=true,push=true + cache-from: type=gha,scope=${{ matrix.arch }} + cache-to: type=gha,mode=max,scope=${{ matrix.arch }} + env: + DOCKER_CONTENT_TRUST: true + + - name: Build and push release image to Docker Hub + id: build-release + uses: docker/build-push-action@v4 + if: ${{ github.event.inputs.dockerRepository == 'redash' }} + with: + tags: | + ${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }} + context: . 
+ build-args: | + test_all_deps=true + outputs: type=image,push-by-digest=false,push=true + cache-from: type=gha,scope=${{ matrix.arch }} + cache-to: type=gha,mode=max,scope=${{ matrix.arch }} + env: + DOCKER_CONTENT_TRUST: true + + - name: "Failure: output container logs to console" + if: failure() + run: docker compose logs + + - name: Export digest + run: | + mkdir -p ${{ runner.temp }}/digests + if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then + digest="${{ steps.build-preview.outputs.digest}}" + else + digest="${{ steps.build-release.outputs.digest}}" + fi + touch "${{ runner.temp }}/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ matrix.arch }} + path: ${{ runner.temp }}/digests/* + if-no-files-found: error + + merge-docker-image: + runs-on: ubuntu-22.04 + needs: build-docker-image + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKER_USER }} + password: ${{ secrets.DOCKER_PASS }} + + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: ${{ runner.temp }}/digests + pattern: digests-* + merge-multiple: true + + - name: Create and push manifest for the preview image + if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }} + working-directory: ${{ runner.temp }}/digests + run: | + docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \ + $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *) + docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \ + $(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *) + + - name: Create and push manifest for the release image + if: ${{ github.event.inputs.dockerRepository == 'redash' }} + working-directory: ${{ runner.temp }}/digests + run: | + docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \ + $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *) diff --git a/.github/workflows/restyled.yml b/.github/workflows/restyled.yml new file mode 100644 index 0000000000..5c1d5e7f37 --- /dev/null +++ b/.github/workflows/restyled.yml @@ -0,0 +1,37 @@ +name: Restyled + +on: + # DISABLED due to org policies + # pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + restyled: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - uses: restyled-io/actions/setup@v4 + - id: restyler + uses: restyled-io/actions/run@v4 + with: + fail-on-differences: true + + - if: | + !cancelled() && + steps.restyler.outputs.success == 'true' && + github.event.pull_request.head.repo.full_name == github.repository + uses: peter-evans/create-pull-request@v6 + with: + base: ${{ steps.restyler.outputs.restyled-base }} + branch: ${{ steps.restyler.outputs.restyled-head }} + title: ${{ steps.restyler.outputs.restyled-title }} + body: ${{ steps.restyler.outputs.restyled-body }} + labels: "restyled" + reviewers: ${{ github.event.pull_request.user.login }} + delete-branch: true diff --git a/.gitignore b/.gitignore index ec0a379187..3fba4897ec 100644 --- 
a/.gitignore +++ b/.gitignore @@ -5,6 +5,8 @@ venv/ .coveralls.yml .idea *.pyc +.nyc_output +coverage .coverage coverage.xml client/dist @@ -15,6 +17,7 @@ client/dist _build .vscode .env +.tool-versions dump.rdb diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000000..c42da845b4 --- /dev/null +++ b/.npmrc @@ -0,0 +1 @@ +engine-strict = true diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000000..3f430af82b --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +v18 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..e8e6795c5e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,10 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + language_version: python3 + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "v0.0.287" + hooks: + - id: ruff diff --git a/.restyled.yaml b/.restyled.yaml index 9a9537ce7a..ddb249dab0 100644 --- a/.restyled.yaml +++ b/.restyled.yaml @@ -38,7 +38,9 @@ request_review: author # # These can be used to tell other automation to avoid our PRs. # -labels: ["Skip CI"] +labels: + - restyled + - "Skip CI" # Labels to ignore # @@ -50,13 +52,16 @@ labels: ["Skip CI"] # Restylers to run, and how restylers: - name: black - image: restyled/restyler-black:v19.10b0 + image: restyled/restyler-black:v24.4.2 include: - redash - tests - migrations/versions - name: prettier - image: restyled/restyler-prettier:v1.19.1-2 + image: restyled/restyler-prettier:v3.3.2-2 + command: + - prettier + - --write include: - client/app/**/*.js - client/app/**/*.jsx diff --git a/.yarn/.gitignore b/.yarn/.gitignore new file mode 100644 index 0000000000..d6b7ef32c8 --- /dev/null +++ b/.yarn/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/tests/extensions/__init__.py b/.yarnrc similarity index 100% rename from tests/extensions/__init__.py rename to .yarnrc diff --git a/CHANGELOG.md b/CHANGELOG.md index 16d33956ba..9c53e7b7f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,152 @@ # Change Log +## V10.1.0 - 2021-11-23 + +This release includes patches for three security vulnerabilities: + +- Insecure default configuration affects installations where REDASH_COOKIE_SECRET is not set explicitly (CVE-2021-41192) +- SSRF vulnerability affects installations that enabled URL-loading data sources (CVE-2021-43780) +- Incorrect usage of state parameter in OAuth client code affects installations where Google Login is enabled (CVE-2021-43777) + +And a couple features that didn't merge in time for 10.0.0 + +- Big Query: Speed up schema loading (#5632) +- Add support for Firebolt data source (#5606) +- Fix: Loading schema for Sqlite DB with "Order" column name fails (#5623) + +## v10.0.0 - 2021-10-01 + +A few changes were merged during the V10 beta period. + +- New Data Source: CSV/Excel Files +- Fix: Edit Source button disappeared for users without CanEdit permissions +- We pinned our docker base image to Python3.7-slim-buster to avoid build issues +- Fix: dashboard list pagination didn't work + +## v10.0.0-beta - 2021-06-16 + +Just over a year since our last release, the V10 beta is ready. Since we never made a non-beta release of V9, we expect many users will upgrade directly from V8 -> V10. This will bring a lot of exciting features. Please check out the V9 beta release notes below to learn more. + +This V10 beta incorporates fixes for the feedback we received on the V9 beta along with a few long-requested features (horizontal bar charts!) and other changes to improve UX and reliability. 
+ +This release was made possible by contributions from 35+ people (the Github API didn't let us pull handles this time around): Alex Kovar, Alexander Rusanov, Arik Fraimovich, Ben Amor, Christopher Grant, Đáș·ng Minh DĆ©ng, Daniel Lang, deecay, Elad Ossadon, Gabriel Dutra, iwakiriK, Jannis Leidel, Jerry, Jesse Whitehouse, Jiajie Zhong, Jim Sparkman, Jonathan Hult, Josh Bohde, Justin Talbot, koooge, Lei Ni, Levko Kravets, Lingkai Kong, max-voronov, Mike Nason, Nolan Nichols, Omer Lachish, Patrick Yang, peterlee, Rafael Wendel, Sebastian Tramp, simonschneider-db, Tim Gates, Tobias Macey, Vipul Mathur, and Vladislav Denisov + +Our special thanks to [Sohail Ahmed](https://pk.linkedin.com/in/sohail-ahmed-755776184) for reporting a vulnerability in our "forgot password" page (#5425) + +### Upgrading + +(This section is duplicated from the previous release - since many users will upgrade directly from V8 -> V10) + +Typically, if you are running your own instance of Redash and wish to upgrade, you would simply modify the Docker tag in your `docker-compose.yml` file. Since RQ has replaced Celery in this version, there are a couple extra modifications that need to be done in your `docker-compose.yml`: + +1. Under `services/scheduler/environment`, omit `QUEUES` and `WORKERS_COUNT` (and omit `environment` altogether if it is empty). +2. Under `services`, add a new service for general RQ jobs: + +```yaml +worker: + <<: *redash-service + command: worker + environment: + QUEUES: "periodic emails default" + WORKERS_COUNT: 1 +``` + +Following that, force a recreation of your containers with `docker-compose up --force-recreate --build` and you should be good to go. +### UX +- Redash now uses a vertical navbar +- Dashboard list now includes “My Dashboards” filter +- Dashboard parameters can now be re-ordered +- Queries can now be executed with Shift + Enter on all platforms. 
+- Added New Dashboard/Query/Alert buttons to corresponding list pages +- Dashboard text widgets now prompt to confirm before closing the text editor +- A plus sign is now shown between tags used for search +- On the queries list view “My Queries” has moved above “Archived” +- Improved behavior for filtering by tags in list views +- When a user’s session expires for inactivity, they are prompted to log-in with a pop-up so they don’t lose their place in the app +- Numerous accessibility changes towards the a11y standard +- Hide the “Create” menu button if current user doesn’t have permission to any data sources + +### Visualizations +- Feature: Added support for horizontal box plots +- Feature: Added support for horizontal bar charts +- Feature: Added “Reverse” option for Chart visualization legend +- Feature: Added option to align Chart Y-axes at zero +- Feature: The table visualization header is now fixed when scrolling +- Feature: Added USA map to choropleth visualization +- Fix: Selected filters were reset when switching visualizations +- Fix: Stacked bar chart showed the wrong Y-axis range in some cases +- Fix: Bar chart with second y axis overlapped data series +- Fix: Y-axis autoscale failed when min or max was set +- Fix: Custom JS visualization was broken because of a typo +- Fix: Too large visualization caused filters block to collapse +- Fix: Sankey visualization looked inconsistent if the data source returned VARCHAR instead of numeric types + +### Structural Updates +- Redash now prevents CSRF attacks +- Migration to TypeScript +- Upgrade to Antd version 4 +### Data Sources +- New Data Sources: SPARQL Endpoint, Eccenca Corporate Memory, TrinoDB +- Databricks + - Custom Schema Browser that allows switching between databases + - Option added to truncate large results + - Support for multiple-statement queries + - Schema browser can now use eventlet instead of RQ +- MongoDB: + - Moved Username and Password out of the connection string so that password can be stored secretly +- Oracle: + - Fix: Annotated queries always failed. Annotation is now disabled +- Postgres/CockroachDB: + - SSL certfile/keyfile fields are now handled as secret +- Python: + - Feature: Custom built-ins are now supported + - Fix: Query runner was not compatible with Python 3 +- Snowflake: + - Data source now accepts a custom host address (for use with proxies) +- TreasureData: + - API key field is now handled as secret +- Yandex: + - OAuth token field is now handled as secret + +### Alerts +- Feature: Added ability to mute alerts without deleting them +- Change: Non-email alert destination details are now obfuscated to avoid leaking sensitive information (webhook URLs, tokens etc.) 
+- Fix: numerical comparisons failed if value from query was a string + +### Parameters +- Added “Last 12 months” option for dynamic date ranges + +### Bug Fixes +- Fix: Private addresses were not allowed even when enforcing was disabled +- Fix: Python query runner wasn’t updated for Python 3 +- Fix: Sorting queries by schedule returned the wrong order +- Fix: Counter visualization was enormous in some cases +- Fix: Dashboard URL will now change when the dashboard title changes +- Fix: URL parameters were removed when forking a query +- Fix: Create link on data sources page was broken +- Fix: Queries could be reassigned to read-only data sources +- Fix: Multi-select dropdown was very slow if there were 1k+ options +- Fix: Search Input couldn’t be focused or updated while editing a dashboard +- Fix: The CLI command for “status” did not work +- Fix: The dashboard list screen displayed too few items under certain pagination configurations + +### Other +- Added an environment variable to disable public sharing links for queries and dashboards +- Alert destinations are now encrypted at the database +- The base query runner now has stubs to implement result truncating for other data sources +- Static SAML configuration and assertion encryption are now supported +- Adds new component for adding extra actions to the query and dashboard pages +- Non-admins with at least view_only permission on a dashboard can now make GET requests to the data source resource +- Added a BLOCKED_DOMAINS setting to prevent sign-ups from emails at specific domains +- Added a rate limit to the “forgot password” page +- RQ workers will now shutdown gracefully for known error codes +- Scheduled execution failure counter now resets following a successful ad hoc execution +- Redash now deletes locks for cancelled queries +- Upgraded Ace Editor from v6 to v9 +- Added a periodic job to remove ghost locks +- Removed content width limit on all pages +- Introduce a React component + ## v9.0.0-beta - 2020-06-11 This release was long time in the making and has several major changes: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e9c28e6bc6..e090a0f8fa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,19 +4,7 @@ Thank you for taking the time to contribute! :tada::+1: The following is a set of guidelines for contributing to Redash. These are guidelines, not rules, please use your best judgement and feel free to propose changes to this document in a pull request. -## Quick Links: - -- [Feature Requests](https://discuss.redash.io/c/feature-requests) -- [Documentation](https://redash.io/help/) -- [Blog](https://blog.redash.io/) -- [Twitter](https://twitter.com/getredash) - ---- -:star: If you already here and love the project, please make sure to press the Star button. :star: - ---- - - +:star: If you're already here and love the project, please make sure to press the Star button. :star: ## Table of Contents [How can I contribute?](#how-can-i-contribute) @@ -32,6 +20,13 @@ The following is a set of guidelines for contributing to Redash. These are guide - [Release Method](#release-method) - [Code of Conduct](#code-of-conduct) +## Quick Links: + +- [User Forum](https://github.com/getredash/redash/discussions) +- [Documentation](https://redash.io/help/) + + +--- ## How can I contribute? ### Reporting Bugs @@ -39,25 +34,54 @@ The following is a set of guidelines for contributing to Redash. These are guide When creating a new bug report, please make sure to: - Search for existing issues first. 
If you find a previous report of your issue, please update the existing issue with additional information instead of creating a new one. -- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a discussion in [the support forum](https://discuss.redash.io/c/support) first. Unless you can provide clear steps to reproduce, it's probably better to start with a thread in the forum and later to open an issue. +- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a [Q&A discussion](https://github.com/getredash/redash/discussions/new?category=q-a) first. Unless you can provide clear steps to reproduce, it's probably better to start with a discussion and later to open an issue. - If you still decide to open an issue, please review the template and guidelines and include as much details as possible. ### Suggesting Enhancements / Feature Requests If you would like to suggest an enhancement or ask for a new feature: -- Please check [the forum](https://discuss.redash.io/c/feature-requests/5) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments. +- Please check [the Ideas discussions](https://github.com/getredash/redash/discussions/categories/ideas) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments. - If there is no open thread, you're welcome to start one to have a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*. ### Pull Requests -- **Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it. -- Include screenshots and animated GIFs in your pull request whenever possible. +**Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This is to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it. + +#### Criteria for Review / Merging + +When you open your pull request, please follow this repository’s PR template carefully: + +- Indicate the type of change + - If you implement multiple unrelated features, bug fixes, or refactors please split them into individual pull requests. +- Describe the change +- If fixing a bug, please describe the bug or link to an existing github issue / forum discussion +- Include UI screenshots / GIFs whenever possible - Please add [documentation](#documentation) for new features or changes in functionality along with the code. - Please follow existing code style: - Python: we use [Black](https://github.com/psf/black) to auto format the code. - Javascript: we use [Prettier](https://github.com/prettier/prettier) to auto-format the code. 
- + +#### Initial Review (1 week) + +During this phase, a team member will apply the “Team Review” label if a pull request meets our criteria or a “Needs More Information” label if not. If more information is required, the team member will comment which criteria have not been met. + +If your pull request receives the “Needs More Information” label, please make the requested changes and then remove the label. This resets the 1 week timer for an initial review. + +Stale pull requests that remain untouched in “Needs More Information” for more than 4 weeks will be closed. + +If a team member closes your pull request, you may reopen it after you have made the changes requested during initial review. After you make these changes, remove the “Needs More Information” label. This again resets the timer for another initial review. + +#### Full Review (2 weeks) + +After the “Team Review” label is applied, a member of the core team will review the PR within 2 weeks. + +Reviews will approve, request changes, or ask questions to discuss areas of uncertainty. After you’ve responded, a member of the team will re-review within one week. + +#### Merging (1 week) + +After your pull request has been approved, a member of the core team will merge the pull request within a week. + ### Documentation The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/src/pages/kb) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface. diff --git a/Dockerfile b/Dockerfile index 488305b7be..850638edd8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,30 +1,52 @@ -FROM node:12 as frontend-builder +FROM node:18-bookworm AS frontend-builder + +RUN npm install --global --force yarn@1.22.22 # Controls whether to build the frontend assets ARG skip_frontend_build +ENV CYPRESS_INSTALL_BINARY=0 +ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 + +RUN useradd -m -d /frontend redash +USER redash + WORKDIR /frontend -COPY package.json package-lock.json /frontend/ -COPY viz-lib /frontend/viz-lib -RUN if [ "x$skip_frontend_build" = "x" ] ; then npm ci --unsafe-perm; fi +COPY --chown=redash package.json yarn.lock .yarnrc /frontend/ +COPY --chown=redash viz-lib /frontend/viz-lib +COPY --chown=redash scripts /frontend/scripts -COPY client /frontend/client -COPY webpack.config.js /frontend/ -RUN if [ "x$skip_frontend_build" = "x" ] ; then npm run build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi -FROM python:3.7-slim-buster +# Controls whether to instrument code for coverage information +ARG code_coverage +ENV BABEL_ENV=${code_coverage:+test} -EXPOSE 5000 +# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building. +RUN yarn config set network-timeout 300000 + +RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi -# Controls whether to install extra dependencies needed for all data sources. -ARG skip_ds_deps -# Controls whether to install dev dependencies. 
-ARG skip_dev_deps +COPY --chown=redash client /frontend/client +COPY --chown=redash webpack.config.js /frontend/ +RUN < /etc/apt/sources.list.d/mssql-release.list && \ - apt-get update && \ - ACCEPT_EULA=Y apt-get install -y msodbcsql17 && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -ARG databricks_odbc_driver_url=https://databricks.com/wp-content/uploads/2.6.10.1010-2/SimbaSparkODBC-2.6.10.1010-2-Debian-64bit.zip -ADD $databricks_odbc_driver_url /tmp/simba_odbc.zip -RUN unzip /tmp/simba_odbc.zip -d /tmp/ \ - && dpkg -i /tmp/SimbaSparkODBC-*/*.deb \ - && echo "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \ - && rm /tmp/simba_odbc.zip \ - && rm -rf /tmp/SimbaSparkODBC* + +ARG TARGETPLATFORM +ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip +RUN < /etc/apt/sources.list.d/mssql-release.list + apt-get update + ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 + apt-get clean + rm -rf /var/lib/apt/lists/* + curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip + chmod 600 /tmp/simba_odbc.zip + unzip /tmp/simba_odbc.zip -d /tmp/simba + dpkg -i /tmp/simba/*.deb + printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini + rm /tmp/simba_odbc.zip + rm -rf /tmp/simba + fi +EOF WORKDIR /app -# Disalbe PIP Cache and Version Check -ENV PIP_DISABLE_PIP_VERSION_CHECK=1 -ENV PIP_NO_CACHE_DIR=1 +ENV POETRY_VERSION=1.8.3 +ENV POETRY_HOME=/etc/poetry +ENV POETRY_VIRTUALENVS_CREATE=false +RUN curl -sSL https://install.python-poetry.org | python3 - + +# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions. +RUN /etc/poetry/bin/poetry cache clear pypi --all + +COPY pyproject.toml poetry.lock ./ -# We first copy only the requirements file, to avoid rebuilding on every file -# change. -COPY requirements.txt requirements_bundles.txt requirements_dev.txt requirements_all_ds.txt ./ -RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements.txt -r requirements_dev.txt; else pip install -r requirements.txt; fi -RUN if [ "x$skip_ds_deps" = "x" ] ; then pip install -r requirements_all_ds.txt ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi +ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi" +# for LDAP authentication, install with `ldap3` group +# disabled by default due to GPL license conflict +ARG install_groups="main,all_ds,dev" +RUN /etc/poetry/bin/poetry install --only $install_groups $POETRY_OPTIONS -COPY . /app -COPY --from=frontend-builder /frontend/client/dist /app/client/dist -RUN chown -R redash /app +COPY --chown=redash . /app +COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist +RUN chown redash /app USER redash ENTRYPOINT ["/app/bin/docker-entrypoint"] diff --git a/LICENSE.borders b/LICENSE.borders new file mode 100644 index 0000000000..f9e6eff226 --- /dev/null +++ b/LICENSE.borders @@ -0,0 +1,3 @@ +The Bahrain map data used in Redash was downloaded from +https://cartographyvectors.com/map/857-bahrain-detailed-boundary in PR #6192. +* Free for personal and commercial purpose with attribution. 
diff --git a/Makefile b/Makefile index 7bc3849190..c5abda5656 100644 --- a/Makefile +++ b/Makefile @@ -1,57 +1,80 @@ -.PHONY: compose_build up test_db create_database clean down bundle tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash +.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash -compose_build: - docker-compose build +compose_build: .env + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build up: - docker-compose up -d --build + docker compose up -d redis postgres --remove-orphans + docker compose exec -u postgres postgres psql postgres --csv \ + -1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \ + | grep -q "organizations" || make create_database + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans test_db: @for i in `seq 1 5`; do \ - if (docker-compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \ + if (docker compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \ else echo "postgres initializing..."; sleep 5; fi \ done - docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"' + docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"' -create_database: - docker-compose run server create_db +create_database: .env + docker compose run server create_db clean: - docker-compose down && docker-compose rm + docker compose down + docker compose --project-name cypress down + docker compose rm --stop --force + docker compose --project-name cypress rm --stop --force + docker image rm --force \ + cypress-server:latest cypress-worker:latest cypress-scheduler:latest \ + redash-server:latest redash-worker:latest redash-scheduler:latest + docker container prune --force + docker image prune --force + docker volume prune --force + +clean-all: clean + docker image rm --force \ + redash/redash:latest redis:7-alpine maildev/maildev:latest \ + pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest down: - docker-compose down + docker compose down -bundle: - docker-compose run server bin/bundle-extensions +.env: + printf "REDASH_COOKIE_SECRET=`pwgen -1s 32`\nREDASH_SECRET_KEY=`pwgen -1s 32`\n" >> .env + +env: .env + +format: + pre-commit run --all-files tests: - docker-compose run server tests + docker compose run server tests lint: - ./bin/flake8_tests.sh + ruff check . + black --check . 
--diff backend-unit-tests: up test_db - docker-compose run --rm --name tests server tests + docker compose run --rm --name tests server tests -frontend-unit-tests: bundle - npm ci - npm run bundle - npm test +frontend-unit-tests: + CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile + yarn test -test: lint backend-unit-tests frontend-unit-tests +test: backend-unit-tests frontend-unit-tests lint -build: bundle - npm run build +build: + yarn build -watch: bundle - npm run watch +watch: + yarn watch -start: bundle - npm run start +start: + yarn start redis-cli: - docker-compose run --rm redis redis-cli -h redis + docker compose run --rm redis redis-cli -h redis bash: - docker-compose run --rm server bash + docker compose run --rm server bash diff --git a/README.md b/README.md index b7b487c2b3..15c09ee0f7 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@

[![Documentation](https://img.shields.io/badge/docs-redash.io/help-brightgreen.svg)](https://redash.io/help/) -[![Build Status](https://circleci.com/gh/getredash/redash.png?style=shield&circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040)](https://circleci.com/gh/getredash/redash/tree/master) +[![GitHub Build](https://github.com/getredash/redash/actions/workflows/ci.yml/badge.svg)](https://github.com/getredash/redash/actions) Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions. @@ -31,48 +31,71 @@ Redash features: Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help/data-sources/supported-data-sources). It can also be extended to support more. Below is a list of built-in sources: - Amazon Athena +- Amazon CloudWatch / Insights - Amazon DynamoDB - Amazon Redshift +- ArangoDB - Axibase Time Series Database -- Cassandra +- Apache Cassandra - ClickHouse - CockroachDB +- Couchbase - CSV -- Databricks (Apache Spark) +- Databricks - DB2 by IBM -- Druid +- Dgraph +- Apache Drill +- Apache Druid +- e6data +- Eccenca Corporate Memory - Elasticsearch +- Exasol +- Microsoft Excel +- Firebolt +- Databend - Google Analytics - Google BigQuery - Google Spreadsheets - Graphite - Greenplum -- Hive -- Impala +- Apache Hive +- Apache Impala - InfluxDB -- JIRA +- InfluxDBv2 +- IBM Netezza Performance Server +- JIRA (JQL) - JSON - Apache Kylin - OmniSciDB (Formerly MapD) +- MariaDB - MemSQL - Microsoft Azure Data Warehouse / Synapse - Microsoft Azure SQL Database +- Microsoft Azure Data Explorer / Kusto - Microsoft SQL Server - MongoDB - MySQL - Oracle +- Apache Phoenix +- Apache Pinot - PostgreSQL - Presto - Prometheus - Python - Qubole - Rockset +- RisingWave - Salesforce - ScyllaDB - Shell Scripts - Snowflake +- SPARQL - SQLite +- TiDB +- Tinybird - TreasureData +- Trino +- Uptycs - Vertica - Yandex AppMetrrica - Yandex Metrica @@ -80,12 +103,13 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help ## Getting Help * Issues: https://github.com/getredash/redash/issues -* Discussion Forum: https://discuss.redash.io/ +* Discussion Forum: https://github.com/getredash/redash/discussions/ +* Development Discussion: https://discord.gg/tN5MdmfGBp ## Reporting Bugs and Contributing Code * Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new). -* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://redash.io/help-onpremise/dev/guide.html) and make a pull request. We need all the help we can get! +* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://github.com/getredash/redash/wiki/Local-development-setup) and make a pull request. We need all the help we can get! 
## Security diff --git a/bin/bundle-extensions b/bin/bundle-extensions deleted file mode 100755 index ce0e300854..0000000000 --- a/bin/bundle-extensions +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python3 -"""Copy bundle extension files to the client/app/extension directory""" -import logging -import os -from pathlib import Path -from shutil import copy -from collections import OrderedDict as odict - -import importlib_metadata -import importlib_resources - -# Name of the subdirectory -BUNDLE_DIRECTORY = "bundle" - -logger = logging.getLogger(__name__) - - -# Make a directory for extensions and set it as an environment variable -# to be picked up by webpack. -extensions_relative_path = Path("client", "app", "extensions") -extensions_directory = Path(__file__).parent.parent / extensions_relative_path - -if not extensions_directory.exists(): - extensions_directory.mkdir() -os.environ["EXTENSIONS_DIRECTORY"] = str(extensions_relative_path) - - -def entry_point_module(entry_point): - """Returns the dotted module path for the given entry point""" - return entry_point.pattern.match(entry_point.value).group("module") - - -def load_bundles(): - """"Load bundles as defined in Redash extensions. - - The bundle entry point can be defined as a dotted path to a module - or a callable, but it won't be called but just used as a means - to find the files under its file system path. - - The name of the directory it looks for files in is "bundle". - - So a Python package with an extension bundle could look like this:: - - my_extensions/ - ├── __init__.py - └── wide_footer - ├── __init__.py - └── bundle - ├── extension.js - └── styles.css - - and would then need to register the bundle with an entry point - under the "redash.bundles" group, e.g. in your setup.py:: - - setup( - # ... - entry_points={ - "redash.bundles": [ - "wide_footer = my_extensions.wide_footer", - ] - # ... - }, - # ... - ) - - """ - bundles = odict() - for entry_point in importlib_metadata.entry_points().get("redash.bundles", []): - logger.info('Loading Redash bundle "%s".', entry_point.name) - module = entry_point_module(entry_point) - # Try to get a list of bundle files - try: - bundle_dir = importlib_resources.files(module).joinpath(BUNDLE_DIRECTORY) - except (ImportError, TypeError): - # Module isn't a package, so can't have a subdirectory/-package - logger.error( - 'Redash bundle module "%s" could not be imported: "%s"', - entry_point.name, - module, - ) - continue - if not bundle_dir.is_dir(): - logger.error( - 'Redash bundle directory "%s" could not be found or is not a directory: "%s"', - entry_point.name, - bundle_dir, - ) - continue - bundles[entry_point.name] = list(bundle_dir.rglob("*")) - return bundles - - -bundles = load_bundles().items() -if bundles: - print("Number of extension bundles found: {}".format(len(bundles))) -else: - print("No extension bundles found.") - -for bundle_name, paths in bundles: - # Shortcut in case not paths were found for the bundle - if not paths: - print('No paths found for bundle "{}".'.format(bundle_name)) - continue - - # The destination for the bundle files with the entry point name as the subdirectory - destination = Path(extensions_directory, bundle_name) - if not destination.exists(): - destination.mkdir() - - # Copy the bundle directory from the module to its destination. 
- print('Copying "{}" bundle to {}:'.format(bundle_name, destination.resolve())) - for src_path in paths: - dest_path = destination / src_path.name - print(" - {} -> {}".format(src_path, dest_path)) - copy(str(src_path), str(dest_path)) diff --git a/bin/docker-entrypoint b/bin/docker-entrypoint index 052e8cba37..2f570aba3b 100755 --- a/bin/docker-entrypoint +++ b/bin/docker-entrypoint @@ -2,42 +2,51 @@ set -e scheduler() { - /app/manage.py db upgrade echo "Starting RQ scheduler..." exec /app/manage.py rq scheduler } dev_scheduler() { - /app/manage.py db upgrade echo "Starting dev RQ scheduler..." exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq scheduler } worker() { - /app/manage.py db upgrade echo "Starting RQ worker..." export WORKERS_COUNT=${WORKERS_COUNT:-2} export QUEUES=${QUEUES:-} - supervisord -c worker.conf + exec supervisord -c worker.conf +} + +workers_healthcheck() { + WORKERS_COUNT=${WORKERS_COUNT} + echo "Checking active workers count against $WORKERS_COUNT..." + ACTIVE_WORKERS_COUNT=`echo $(rq info --url $REDASH_REDIS_URL -R | grep workers | grep -oP ^[0-9]+)` + if [ "$ACTIVE_WORKERS_COUNT" -lt "$WORKERS_COUNT" ]; then + echo "$ACTIVE_WORKERS_COUNT workers are active, Exiting" + exit 1 + else + echo "$ACTIVE_WORKERS_COUNT workers are active" + exit 0 + fi } dev_worker() { - /app/manage.py db upgrade echo "Starting dev RQ worker..." exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq worker $QUEUES } server() { - /app/manage.py db upgrade # Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details. MAX_REQUESTS=${MAX_REQUESTS:-1000} MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100} - exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER + TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60} + exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT } create_db() { @@ -58,7 +67,7 @@ help() { echo "" echo "shell -- open shell" echo "dev_server -- start Flask development server with debugger and auto reload" - echo "debug -- start Flask development server with remote debugger via ptvsd" + echo "debug -- start Flask development server with remote debugger via debugpy" echo "create_db -- create database tables" echo "manage -- CLI to manage redash" echo "tests -- run tests" @@ -80,6 +89,10 @@ case "$1" in shift worker ;; + workers_healthcheck) + shift + workers_healthcheck + ;; server) shift server @@ -131,4 +144,3 @@ case "$1" in exec "$@" ;; esac - diff --git a/bin/dockerflow-version b/bin/dockerflow-version deleted file mode 100755 index 027d61971f..0000000000 --- a/bin/dockerflow-version +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -VERSION="$1" - -printf '{"commit":"%s","version":"%s","source":"https://github.com/%s/%s","build":"%s"}\n' \ - "$CIRCLE_SHA1" \ - "$VERSION" \ - "$CIRCLE_PROJECT_USERNAME" \ - "$CIRCLE_PROJECT_REPONAME" \ - "$CIRCLE_BUILD_URL" \ -> version.json diff --git a/bin/flake8_tests.sh b/bin/flake8_tests.sh deleted file mode 100755 index 3c27f7fee2..0000000000 --- a/bin/flake8_tests.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh - -set -o errexit # fail the build if any task fails - -flake8 --version ; pip --version -# stop the build if there are 
Python syntax errors or undefined names -flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics -# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide -flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics diff --git a/bin/get_changes.py b/bin/get_changes.py index 60091bb772..aad1223837 100644 --- a/bin/get_changes.py +++ b/bin/get_changes.py @@ -1,35 +1,44 @@ #!/bin/env python3 -import sys import re import subprocess +import sys def get_change_log(previous_sha): - args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', 'master...{}'.format(previous_sha)] + args = [ + "git", + "--no-pager", + "log", + "--merges", + "--grep", + "Merge pull request", + '--pretty=format:"%h|%s|%b|%p"', + "master...{}".format(previous_sha), + ] log = subprocess.check_output(args) changes = [] - for line in log.split('\n'): + for line in log.split("\n"): try: - sha, subject, body, parents = line[1:-1].split('|') + sha, subject, body, parents = line[1:-1].split("|") except ValueError: continue try: - pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0] + pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0] pull_request = " #{}".format(pull_request) - except Exception as ex: + except Exception: pull_request = "" - author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1] + author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1] changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author)) return changes -if __name__ == '__main__': +if __name__ == "__main__": previous_sha = sys.argv[1] changes = get_change_log(previous_sha) diff --git a/bin/migrations-graph b/bin/migrations-graph deleted file mode 100755 index 5998d4233d..0000000000 --- a/bin/migrations-graph +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -""" -A quick helper script to print the Alembic migration history -via Graphiz and show it via GraphvizOnline on -https://dreampuf.github.io/GraphvizOnline/. 
- -This requires the Graphviz Python library: - - $ pip install --user graphviz - -Then run it with the path to the Alembic config file: - - $ migrations-graph --config migrations/alembic.ini - -""" -import os -import sys -import urllib.parse -import urllib.request - -import click -from alembic import util -from alembic.script import ScriptDirectory -from alembic.config import Config -from alembic.util import CommandError -from graphviz import Digraph - -# Make sure redash can be imported in the migration files -sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) - - -def get_revisions(config, rev_range=None): - script = ScriptDirectory.from_config(config) - - if rev_range is not None: - if ":" not in rev_range: - raise util.CommandError( - "History range requires [start]:[end], [start]:, or :[end]" - ) - base, head = rev_range.strip().split(":") - else: - base = head = None - - return script.walk_revisions(base=base or "base", head=head or "heads") - - -def generate_revision_graph(revisions): - dot = Digraph() - for revision in revisions: - dot.node(revision.revision) - if revision.down_revision is None: - dot.edge("base", revision.revision) - continue - if isinstance(revision.down_revision, str): - dot.edge(revision.down_revision, revision.revision) - continue - for down_revision in revision.down_revision: - dot.edge(down_revision, revision.revision) - return dot - - -@click.command() -@click.option("--config", default="alembic.ini", help="path to alembic config file") -@click.option("--name", default="alembic", help="name of the alembic ini section") -def cli(config, name): - """ - Generates a simple Graphviz dot file and creates a link to - view it online via https://dreampuf.github.io/GraphvizOnline/. - """ - alembic_config = Config(file_=config, ini_section=name) - try: - revisions = get_revisions(alembic_config) - except CommandError as e: - sys.exit(e) - - dot = generate_revision_graph(revisions) - encoded_dot = urllib.parse.quote(bytes(dot.source, "utf-8")) - viz_url = "https://dreampuf.github.io/GraphvizOnline/#%s" % encoded_dot - print("Generated graph for migration history in %s: %s " % (config, viz_url)) - - -if __name__ == "__main__": - cli() diff --git a/bin/release_manager.py b/bin/release_manager.py index 3d9b21c895..bd2200d523 100644 --- a/bin/release_manager.py +++ b/bin/release_manager.py @@ -1,17 +1,20 @@ #!/usr/bin/env python3 import os -import sys import re import subprocess +import sys +from urllib.parse import urlparse + import requests import simplejson -github_token = os.environ['GITHUB_TOKEN'] -auth = (github_token, 'x-oauth-basic') -repo = 'getredash/redash' +github_token = os.environ["GITHUB_TOKEN"] +auth = (github_token, "x-oauth-basic") +repo = "getredash/redash" + def _github_request(method, path, params=None, headers={}): - if not path.startswith('https://api.github.com'): + if urlparse(path).hostname != "api.github.com": url = "https://api.github.com/{}".format(path) else: url = path @@ -22,15 +25,18 @@ def _github_request(method, path, params=None, headers={}): response = requests.request(method, url, data=params, auth=auth) return response + def exception_from_error(message, response): - return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?'))) + return Exception("({}) {}: {}".format(response.status_code, message, response.json().get("message", "?"))) + def rc_tag_name(version): return "v{}-rc".format(version) + def get_rc_release(version): tag = rc_tag_name(version) - response = 
_github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag)) + response = _github_request("get", "repos/{}/releases/tags/{}".format(repo, tag)) if response.status_code == 404: return None @@ -39,84 +45,101 @@ def get_rc_release(version): raise exception_from_error("Unknown error while looking RC release: ", response) + def create_release(version, commit_sha): tag = rc_tag_name(version) params = { - 'tag_name': tag, - 'name': "{} - RC".format(version), - 'target_commitish': commit_sha, - 'prerelease': True + "tag_name": tag, + "name": "{} - RC".format(version), + "target_commitish": commit_sha, + "prerelease": True, } - response = _github_request('post', 'repos/{}/releases'.format(repo), params) + response = _github_request("post", "repos/{}/releases".format(repo), params) if response.status_code != 201: raise exception_from_error("Failed creating new release", response) return response.json() + def upload_asset(release, filepath): - upload_url = release['upload_url'].replace('{?name,label}', '') - filename = filepath.split('/')[-1] + upload_url = release["upload_url"].replace("{?name,label}", "") + filename = filepath.split("/")[-1] with open(filepath) as file_content: - headers = {'Content-Type': 'application/gzip'} - response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False) + headers = {"Content-Type": "application/gzip"} + response = requests.post( + upload_url, file_content, params={"name": filename}, headers=headers, auth=auth, verify=False + ) if response.status_code != 201: # not 200/201/... - raise exception_from_error('Failed uploading asset', response) + raise exception_from_error("Failed uploading asset", response) return response + def remove_previous_builds(release): - for asset in release['assets']: - response = _github_request('delete', asset['url']) + for asset in release["assets"]: + response = _github_request("delete", asset["url"]) if response.status_code != 204: raise exception_from_error("Failed deleting asset", response) + def get_changelog(commit_sha): - latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo)) + latest_release = _github_request("get", "repos/{}/releases/latest".format(repo)) if latest_release.status_code != 200: - raise exception_from_error('Failed getting latest release', latest_release) + raise exception_from_error("Failed getting latest release", latest_release) latest_release = latest_release.json() - previous_sha = latest_release['target_commitish'] - - args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)] + previous_sha = latest_release["target_commitish"] + + args = [ + "git", + "--no-pager", + "log", + "--merges", + "--grep", + "Merge pull request", + '--pretty=format:"%h|%s|%b|%p"', + "{}...{}".format(previous_sha, commit_sha), + ] log = subprocess.check_output(args) - changes = ["Changes since {}:".format(latest_release['name'])] + changes = ["Changes since {}:".format(latest_release["name"])] - for line in log.split('\n'): + for line in log.split("\n"): try: - sha, subject, body, parents = line[1:-1].split('|') + sha, subject, body, parents = line[1:-1].split("|") except ValueError: continue try: - pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0] + pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0] pull_request = " #{}".format(pull_request) - except Exception as ex: + except Exception: 
pull_request = "" - author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1] + author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1] changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author)) return "\n".join(changes) + def update_release_commit_sha(release, commit_sha): params = { - 'target_commitish': commit_sha, + "target_commitish": commit_sha, } - response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params) + response = _github_request("patch", "repos/{}/releases/{}".format(repo, release["id"]), params) if response.status_code != 200: raise exception_from_error("Failed updating commit sha for existing release", response) return response.json() + def update_release(version, build_filepath, commit_sha): try: release = get_rc_release(version) @@ -125,21 +148,22 @@ def update_release(version, build_filepath, commit_sha): else: release = create_release(version, commit_sha) - print("Using release id: {}".format(release['id'])) + print("Using release id: {}".format(release["id"])) remove_previous_builds(release) response = upload_asset(release, build_filepath) changelog = get_changelog(commit_sha) - response = _github_request('patch', release['url'], {'body': changelog}) + response = _github_request("patch", release["url"], {"body": changelog}) if response.status_code != 200: raise exception_from_error("Failed updating release description", response) except Exception as ex: print(ex) -if __name__ == '__main__': + +if __name__ == "__main__": commit_sha = sys.argv[1] version = sys.argv[2] filepath = sys.argv[3] diff --git a/bin/upgrade b/bin/upgrade deleted file mode 100755 index 376866f1ed..0000000000 --- a/bin/upgrade +++ /dev/null @@ -1,242 +0,0 @@ -#!/usr/bin/env python3 -import urllib -import argparse -import os -import subprocess -import sys -from collections import namedtuple -from fnmatch import fnmatch - -import requests - -try: - import semver -except ImportError: - print("Missing required library: semver.") - exit(1) - -REDASH_HOME = os.environ.get('REDASH_HOME', '/opt/redash') -CURRENT_VERSION_PATH = '{}/current'.format(REDASH_HOME) - - -def run(cmd, cwd=None): - if not cwd: - cwd = REDASH_HOME - - return subprocess.check_output(cmd, cwd=cwd, shell=True, stderr=subprocess.STDOUT) - - -def confirm(question): - reply = str(input(question + ' (y/n): ')).lower().strip() - - if reply[0] == 'y': - return True - if reply[0] == 'n': - return False - else: - return confirm("Please use 'y' or 'n'") - - -def version_path(version_name): - return "{}/{}".format(REDASH_HOME, version_name) - -END_CODE = '\033[0m' - - -def colored_string(text, color): - if sys.stdout.isatty(): - return "{}{}{}".format(color, text, END_CODE) - else: - return text - - -def h1(text): - print(colored_string(text, '\033[4m\033[1m')) - - -def green(text): - print(colored_string(text, '\033[92m')) - - -def red(text): - print(colored_string(text, '\033[91m')) - - -class Release(namedtuple('Release', ('version', 'download_url', 'filename', 'description'))): - def v1_or_newer(self): - return semver.compare(self.version, '1.0.0-alpha') >= 0 - - def is_newer(self, version): - return semver.compare(self.version, version) > 0 - - @property - def version_name(self): - return self.filename.replace('.tar.gz', '') - - -def get_latest_release_from_ci(): - response = 
requests.get('https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master') - - if response.status_code != 200: - exit("Failed getting releases (status code: %s)." % response.status_code) - - tarball_asset = filter(lambda asset: asset['url'].endswith('.tar.gz'), response.json())[0] - filename = urllib.unquote(tarball_asset['pretty_path'].split('/')[-1]) - version = filename.replace('redash.', '').replace('.tar.gz', '') - - release = Release(version, tarball_asset['url'], filename, '') - - return release - - -def get_release(channel): - if channel == 'ci': - return get_latest_release_from_ci() - - response = requests.get('https://version.redash.io/api/releases?channel={}'.format(channel)) - release = response.json()[0] - - filename = release['download_url'].split('/')[-1] - release = Release(release['version'], release['download_url'], filename, release['description']) - - return release - - -def link_to_current(version_name): - green("Linking to current version...") - run('ln -nfs {} {}'.format(version_path(version_name), CURRENT_VERSION_PATH)) - - -def restart_services(): - # We're doing this instead of simple 'supervisorctl restart all' because - # otherwise it won't notice that /opt/redash/current pointing at a different - # directory. - green("Restarting...") - try: - run('sudo /etc/init.d/redash_supervisord restart') - except subprocess.CalledProcessError as e: - run('sudo service supervisor restart') - - -def update_requirements(version_name): - green("Installing new Python packages (if needed)...") - new_requirements_file = '{}/requirements.txt'.format(version_path(version_name)) - - install_requirements = False - - try: - run('diff {}/requirements.txt {}'.format(CURRENT_VERSION_PATH, new_requirements_file)) != 0 - except subprocess.CalledProcessError as e: - if e.returncode != 0: - install_requirements = True - - if install_requirements: - run('sudo pip install -r {}'.format(new_requirements_file)) - - -def apply_migrations(release): - green("Running migrations (if needed)...") - if not release.v1_or_newer(): - return apply_migrations_pre_v1(release.version_name) - - run("sudo -u redash bin/run ./manage.py db upgrade", cwd=version_path(release.version_name)) - - -def find_migrations(version_name): - current_migrations = set([f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, '*_*.py')]) - new_migrations = sorted([f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, '*_*.py')]) - - return [m for m in new_migrations if m not in current_migrations] - - -def apply_migrations_pre_v1(version_name): - new_migrations = find_migrations(version_name) - - if new_migrations: - green("New migrations to run: ") - print(', '.join(new_migrations)) - else: - print("No new migrations in this version.") - - if new_migrations and confirm("Apply new migrations? (make sure you have backup)"): - for migration in new_migrations: - print("Applying {}...".format(migration)) - run("sudo sudo -u redash PYTHONPATH=. 
bin/run python migrations/{}".format(migration), cwd=version_path(version_name)) - - -def download_and_unpack(release): - directory_name = release.version_name - - green("Downloading release tarball...") - run('sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url)) - green("Unpacking to: {}...".format(directory_name)) - run('sudo mkdir -p {}'.format(directory_name)) - run('sudo tar -C {} -xvf {}'.format(directory_name, release.filename)) - - green("Changing ownership to redash...") - run('sudo chown redash {}'.format(directory_name)) - - green("Linking .env file...") - run('sudo ln -nfs {}/.env {}/.env'.format(REDASH_HOME, version_path(directory_name))) - - -def current_version(): - real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace('.b', '+b') - return real_current_path.replace(REDASH_HOME + '/', '').replace('redash.', '') - - -def verify_minimum_version(): - green("Current version: " + current_version()) - if semver.compare(current_version(), '0.12.0') < 0: - red("You need to have Redash v0.12.0 or newer to upgrade to post v1.0.0 releases.") - green("To upgrade to v0.12.0, run the upgrade script set to the legacy channel (--channel legacy).") - exit(1) - - -def show_description_and_confirm(description): - if description: - print(description) - - if not confirm("Continue with upgrade?"): - red("Cancelling upgrade.") - exit(1) - - -def verify_newer_version(release): - if not release.is_newer(current_version()): - red("The found release is not newer than your current deployed release ({}).".format(current_version())) - if not confirm("Continue with upgrade?"): - red("Cancelling upgrade.") - exit(1) - - -def deploy_release(channel): - h1("Starting Redash upgrade:") - - release = get_release(channel) - green("Found version: {}".format(release.version)) - - if release.v1_or_newer(): - verify_minimum_version() - - verify_newer_version(release) - show_description_and_confirm(release.description) - - try: - download_and_unpack(release) - update_requirements(release.version_name) - apply_migrations(release) - link_to_current(release.version_name) - restart_services() - green("Done! 
Enjoy.") - except subprocess.CalledProcessError as e: - red("Failed running: {}".format(e.cmd)) - red("Exit status: {}\nOutput:\n{}".format(e.returncode, e.output)) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument("--channel", help="The channel to get release from (default: stable).", default='stable') - args = parser.parse_args() - - deploy_release(args.channel) diff --git a/client/.babelrc b/client/.babelrc index 0fe25a043c..e8b6be2c9b 100644 --- a/client/.babelrc +++ b/client/.babelrc @@ -1,19 +1,29 @@ { "presets": [ - ["@babel/preset-env", { - "exclude": [ - "@babel/plugin-transform-async-to-generator", - "@babel/plugin-transform-arrow-functions" - ], - "useBuiltIns": "usage" - }], - "@babel/preset-react" + [ + "@babel/preset-env", + { + "exclude": ["@babel/plugin-transform-async-to-generator", "@babel/plugin-transform-arrow-functions"], + "corejs": "2", + "useBuiltIns": "usage" + } + ], + "@babel/preset-react", + "@babel/preset-typescript" ], "plugins": [ "@babel/plugin-proposal-class-properties", "@babel/plugin-transform-object-assign", - ["babel-plugin-transform-builtin-extend", { - "globals": ["Error"] - }] - ] + [ + "babel-plugin-transform-builtin-extend", + { + "globals": ["Error"] + } + ] + ], + "env": { + "test": { + "plugins": ["istanbul"] + } + } } diff --git a/client/.eslintrc.js b/client/.eslintrc.js index 152bf9ca3d..d1bb2599a1 100644 --- a/client/.eslintrc.js +++ b/client/.eslintrc.js @@ -1,17 +1,71 @@ module.exports = { root: true, - extends: ["react-app", "plugin:compat/recommended", "prettier"], - plugins: ["jest", "compat", "no-only-tests"], + parser: "@typescript-eslint/parser", + extends: [ + "react-app", + "plugin:compat/recommended", + "prettier", + "plugin:jsx-a11y/recommended", + // Remove any typescript-eslint rules that would conflict with prettier + "prettier/@typescript-eslint", + ], + plugins: ["jest", "compat", "no-only-tests", "@typescript-eslint", "jsx-a11y"], settings: { - "import/resolver": "webpack" + "import/resolver": "webpack", }, env: { browser: true, - node: true + node: true, }, rules: { // allow debugger during development "no-debugger": process.env.NODE_ENV === "production" ? 
2 : 0, - "jsx-a11y/anchor-is-valid": "off", - } + "jsx-a11y/anchor-is-valid": [ + // TMP + "off", + { + components: ["Link"], + aspects: ["noHref", "invalidHref", "preferButton"], + }, + ], + "jsx-a11y/no-redundant-roles": "error", + "jsx-a11y/no-autofocus": "off", + "jsx-a11y/click-events-have-key-events": "off", // TMP + "jsx-a11y/no-static-element-interactions": "off", // TMP + "jsx-a11y/no-noninteractive-element-interactions": "off", // TMP + "no-console": ["warn", { allow: ["warn", "error"] }], + "no-restricted-imports": [ + "error", + { + paths: [ + { + name: "antd", + message: "Please use 'import XXX from antd/lib/XXX' import instead.", + }, + { + name: "antd/lib", + message: "Please use 'import XXX from antd/lib/XXX' import instead.", + }, + ], + }, + ], + }, + overrides: [ + { + // Only run typescript-eslint on TS files + files: ["*.ts", "*.tsx", ".*.ts", ".*.tsx"], + extends: ["plugin:@typescript-eslint/recommended"], + rules: { + // Do not require functions (especially react components) to have explicit returns + "@typescript-eslint/explicit-function-return-type": "off", + // Do not require to type every import from a JS file to speed up development + "@typescript-eslint/no-explicit-any": "off", + // Do not complain about useless contructors in declaration files + "no-useless-constructor": "off", + "@typescript-eslint/no-useless-constructor": "error", + // Many API fields and generated types use camelcase + "@typescript-eslint/camelcase": "off", + }, + }, + ], }; diff --git a/client/app/assets/images/db-logos/arangodb.png b/client/app/assets/images/db-logos/arangodb.png new file mode 100644 index 0000000000..1b2defd2d6 Binary files /dev/null and b/client/app/assets/images/db-logos/arangodb.png differ diff --git a/client/app/assets/images/db-logos/corporate_memory.png b/client/app/assets/images/db-logos/corporate_memory.png new file mode 100644 index 0000000000..f168b02ecd Binary files /dev/null and b/client/app/assets/images/db-logos/corporate_memory.png differ diff --git a/client/app/assets/images/db-logos/databend.png b/client/app/assets/images/db-logos/databend.png new file mode 100644 index 0000000000..dec146f4cb Binary files /dev/null and b/client/app/assets/images/db-logos/databend.png differ diff --git a/client/app/assets/images/db-logos/databricks.png b/client/app/assets/images/db-logos/databricks.png index 7624f2adcf..e28eb9212f 100644 Binary files a/client/app/assets/images/db-logos/databricks.png and b/client/app/assets/images/db-logos/databricks.png differ diff --git a/client/app/assets/images/db-logos/dynamodb_sql.png b/client/app/assets/images/db-logos/dynamodb_sql.png deleted file mode 100644 index 22d9e6430b..0000000000 Binary files a/client/app/assets/images/db-logos/dynamodb_sql.png and /dev/null differ diff --git a/client/app/assets/images/db-logos/e6data.png b/client/app/assets/images/db-logos/e6data.png new file mode 100644 index 0000000000..af5cf71c4b Binary files /dev/null and b/client/app/assets/images/db-logos/e6data.png differ diff --git a/client/app/assets/images/db-logos/elasticsearch2.png b/client/app/assets/images/db-logos/elasticsearch2.png new file mode 100644 index 0000000000..e7cb9c8345 Binary files /dev/null and b/client/app/assets/images/db-logos/elasticsearch2.png differ diff --git a/client/app/assets/images/db-logos/elasticsearch2_OpenDistroSQLElasticSearch.png b/client/app/assets/images/db-logos/elasticsearch2_OpenDistroSQLElasticSearch.png new file mode 100644 index 0000000000..e7cb9c8345 Binary files /dev/null and 
b/client/app/assets/images/db-logos/elasticsearch2_OpenDistroSQLElasticSearch.png differ diff --git a/client/app/assets/images/db-logos/elasticsearch2_XPackSQLElasticSearch.png b/client/app/assets/images/db-logos/elasticsearch2_XPackSQLElasticSearch.png new file mode 100644 index 0000000000..e7cb9c8345 Binary files /dev/null and b/client/app/assets/images/db-logos/elasticsearch2_XPackSQLElasticSearch.png differ diff --git a/client/app/assets/images/db-logos/excel.png b/client/app/assets/images/db-logos/excel.png new file mode 100644 index 0000000000..0015044885 Binary files /dev/null and b/client/app/assets/images/db-logos/excel.png differ diff --git a/client/app/assets/images/db-logos/firebolt.png b/client/app/assets/images/db-logos/firebolt.png new file mode 100644 index 0000000000..7b6c02a66d Binary files /dev/null and b/client/app/assets/images/db-logos/firebolt.png differ diff --git a/client/app/assets/images/db-logos/google_analytics4.png b/client/app/assets/images/db-logos/google_analytics4.png new file mode 100644 index 0000000000..eaddd9d569 Binary files /dev/null and b/client/app/assets/images/db-logos/google_analytics4.png differ diff --git a/client/app/assets/images/db-logos/google_search_console.png b/client/app/assets/images/db-logos/google_search_console.png new file mode 100644 index 0000000000..a302ca590f Binary files /dev/null and b/client/app/assets/images/db-logos/google_search_console.png differ diff --git a/client/app/assets/images/db-logos/ignite.png b/client/app/assets/images/db-logos/ignite.png new file mode 100644 index 0000000000..046e381012 Binary files /dev/null and b/client/app/assets/images/db-logos/ignite.png differ diff --git a/client/app/assets/images/db-logos/influxdbv2.png b/client/app/assets/images/db-logos/influxdbv2.png new file mode 100644 index 0000000000..f3846cb199 Binary files /dev/null and b/client/app/assets/images/db-logos/influxdbv2.png differ diff --git a/client/app/assets/images/db-logos/nz.png b/client/app/assets/images/db-logos/nz.png new file mode 100644 index 0000000000..663687470c Binary files /dev/null and b/client/app/assets/images/db-logos/nz.png differ diff --git a/client/app/assets/images/db-logos/pinot.png b/client/app/assets/images/db-logos/pinot.png new file mode 100644 index 0000000000..7527e7b157 Binary files /dev/null and b/client/app/assets/images/db-logos/pinot.png differ diff --git a/client/app/assets/images/db-logos/qubole.png b/client/app/assets/images/db-logos/qubole.png deleted file mode 100644 index dfdc2fa2e2..0000000000 Binary files a/client/app/assets/images/db-logos/qubole.png and /dev/null differ diff --git a/client/app/assets/images/db-logos/risingwave.png b/client/app/assets/images/db-logos/risingwave.png new file mode 100644 index 0000000000..ae4a13f129 Binary files /dev/null and b/client/app/assets/images/db-logos/risingwave.png differ diff --git a/client/app/assets/images/db-logos/sparql_endpoint.png b/client/app/assets/images/db-logos/sparql_endpoint.png new file mode 100644 index 0000000000..31ac155d44 Binary files /dev/null and b/client/app/assets/images/db-logos/sparql_endpoint.png differ diff --git a/client/app/assets/images/db-logos/tinybird.png b/client/app/assets/images/db-logos/tinybird.png new file mode 100644 index 0000000000..129555e39f Binary files /dev/null and b/client/app/assets/images/db-logos/tinybird.png differ diff --git a/client/app/assets/images/db-logos/trino.png b/client/app/assets/images/db-logos/trino.png new file mode 100644 index 0000000000..904db40bb5 Binary files /dev/null and 
b/client/app/assets/images/db-logos/trino.png differ diff --git a/client/app/assets/images/db-logos/yandex_disk.png b/client/app/assets/images/db-logos/yandex_disk.png new file mode 100644 index 0000000000..7b375648df Binary files /dev/null and b/client/app/assets/images/db-logos/yandex_disk.png differ diff --git a/client/app/assets/images/destinations/asana.png b/client/app/assets/images/destinations/asana.png new file mode 100644 index 0000000000..42ea1ab9cc Binary files /dev/null and b/client/app/assets/images/destinations/asana.png differ diff --git a/client/app/assets/images/destinations/datadog.png b/client/app/assets/images/destinations/datadog.png new file mode 100644 index 0000000000..0c1cd4e583 Binary files /dev/null and b/client/app/assets/images/destinations/datadog.png differ diff --git a/client/app/assets/images/destinations/discord.png b/client/app/assets/images/destinations/discord.png new file mode 100644 index 0000000000..0781b84ce1 Binary files /dev/null and b/client/app/assets/images/destinations/discord.png differ diff --git a/client/app/assets/images/destinations/hipchat.png b/client/app/assets/images/destinations/hipchat.png deleted file mode 100644 index 88ac512102..0000000000 Binary files a/client/app/assets/images/destinations/hipchat.png and /dev/null differ diff --git a/client/app/assets/images/destinations/microsoft_teams_webhook.png b/client/app/assets/images/destinations/microsoft_teams_webhook.png new file mode 100644 index 0000000000..8ada5c8c69 Binary files /dev/null and b/client/app/assets/images/destinations/microsoft_teams_webhook.png differ diff --git a/client/app/assets/images/destinations/webex.png b/client/app/assets/images/destinations/webex.png new file mode 100644 index 0000000000..bea8fd1cad Binary files /dev/null and b/client/app/assets/images/destinations/webex.png differ diff --git a/client/app/assets/less/ant.less b/client/app/assets/less/ant.less index e95084039b..925840b2fe 100644 --- a/client/app/assets/less/ant.less +++ b/client/app/assets/less/ant.less @@ -16,7 +16,6 @@ @import "~antd/lib/pagination/style/index"; @import "~antd/lib/table/style/index"; @import "~antd/lib/popover/style/index"; -@import "~antd/lib/icon/style/index"; @import "~antd/lib/tag/style/index"; @import "~antd/lib/grid/style/index"; @import "~antd/lib/switch/style/index"; @@ -31,6 +30,7 @@ @import "~antd/lib/badge/style/index"; @import "~antd/lib/card/style/index"; @import "~antd/lib/spin/style/index"; +@import "~antd/lib/skeleton/style/index"; @import "~antd/lib/tabs/style/index"; @import "~antd/lib/notification/style/index"; @import "~antd/lib/collapse/style/index"; @@ -225,6 +225,16 @@ } } + &-tbody > tr&-row { + &:hover, + &:focus, + &:focus-within { + & > td { + background: @table-row-hover-bg; + } + } + } + // Custom styles &-headerless &-tbody > tr:first-child > td { @@ -391,6 +401,18 @@ left: 0; } } + + &:focus, + &:focus-within { + color: @menu-highlight-color; + } + } +} + +.@{dropdown-prefix-cls}-menu-item { + &:focus, + &:focus-within { + background-color: @item-hover-bg; } } @@ -401,3 +423,14 @@ .@{checkbox-prefix-cls} + span { padding-right: 0; } + +// make sure Multiple select has room for icons +.@{select-prefix-cls}-multiple { + &.@{select-prefix-cls}-show-arrow, + &.@{select-prefix-cls}-show-search, + &.@{select-prefix-cls}-loading { + .@{select-prefix-cls}-selector { + padding-right: 30px; + } + } +} diff --git a/client/app/assets/less/inc/alert.less b/client/app/assets/less/inc/alert.less index 3e73d9b54d..fc1f1bbb61 100755 --- 
a/client/app/assets/less/inc/alert.less +++ b/client/app/assets/less/inc/alert.less @@ -23,6 +23,10 @@ padding: 5px 8px; } + .ant-form-item-explain { + margin-top: 10px; + } + .alert-last-triggered { color: @headings-color; } diff --git a/client/app/assets/less/inc/base.less b/client/app/assets/less/inc/base.less index ea8f6bf6a7..14a37f3470 100755 --- a/client/app/assets/less/inc/base.less +++ b/client/app/assets/less/inc/base.less @@ -78,8 +78,6 @@ strong { } } -// Fixed width layout for specific pages - .settings-screen, .home-page, .page-dashboard-list, @@ -89,7 +87,7 @@ strong { .admin-page-layout { .container { width: 100%; - max-width: 1200px; + max-width: none; } } @@ -100,6 +98,10 @@ strong { .clickable { cursor: pointer; + + button&:disabled { + cursor: not-allowed; + } } .resize-vertical { @@ -116,10 +118,6 @@ strong { transition: height 0s, width 0s !important; } -.admin-schema-editor { - padding: 50px 0; -} - .bg-ace { background-color: fade(@redash-gray, 12%) !important; } diff --git a/client/app/assets/less/inc/edit-in-place.less b/client/app/assets/less/inc/edit-in-place.less index 1afd359a48..cf7ef5bd8a 100755 --- a/client/app/assets/less/inc/edit-in-place.less +++ b/client/app/assets/less/inc/edit-in-place.less @@ -1,26 +1,23 @@ -.edit-in-place span { +.edit-in-place { white-space: pre-line; + display: inline-block; p { margin-bottom: 0; } -} -.edit-in-place span.editable { - display: inline-block; - cursor: pointer; -} - -.edit-in-place span.editable:hover { - background: @redash-yellow; - border-radius: @redash-radius; -} + .editable { + display: inline-block; + cursor: pointer; -.edit-in-place.active input, -.edit-in-place.active textarea { - display: inline-block; -} + &:hover { + background: @redash-yellow; + border-radius: @redash-radius; + } + } -.edit-in-place { - display: inline-block; + &.active input, + &.active textarea { + display: inline-block; + } } diff --git a/client/app/assets/less/inc/generics.less b/client/app/assets/less/inc/generics.less index d7f484da0a..4896b3b372 100755 --- a/client/app/assets/less/inc/generics.less +++ b/client/app/assets/less/inc/generics.less @@ -2,163 +2,218 @@ Generate Margin Classes (0px - 25px) margin, margin-top, margin-bottom, margin-left, margin-right -----------------------------------------------------------*/ -.margin (@label, @size: 1, @key:1) when (@size =< 30){ - .m-@{key} { - margin: @size !important; - } - - .m-t-@{key} { - margin-top: @size !important; - } - - .m-b-@{key} { - margin-bottom: @size !important; - } - - .m-l-@{key} { - margin-left: @size !important; - } - - .m-r-@{key} { - margin-right: @size !important; - } - - .margin(@label - 5; @size + 5; @key + 5); +.margin (@label, @size: 1, @key:1) when (@size =< 30) { + .m-@{key} { + margin: @size !important; + } + + .m-t-@{key} { + margin-top: @size !important; + } + + .m-b-@{key} { + margin-bottom: @size !important; + } + + .m-l-@{key} { + margin-left: @size !important; + } + + .m-r-@{key} { + margin-right: @size !important; + } + + .margin(@label - 5; @size + 5; @key + 5); } .margin(25, 0px, 0); -.m-2{ - margin:2px; +.m-2 { + margin: 2px; } /* -------------------------------------------------------- Generate Padding Classes (0px - 25px) padding, padding-top, padding-bottom, padding-left, padding-right -----------------------------------------------------------*/ -.padding (@label, @size: 1, @key:1) when (@size =< 30){ - .p-@{key} { - padding: @size !important; - } - - .p-t-@{key} { - padding-top: @size !important; - } - - .p-b-@{key} { - padding-bottom: 
@size !important; - } - - .p-l-@{key} { - padding-left: @size !important; - } - - .p-r-@{key} { - padding-right: @size !important; - } - - .padding(@label - 5; @size + 5; @key + 5); -} +.padding (@label, @size: 1, @key:1) when (@size =< 30) { + .p-@{key} { + padding: @size !important; + } -.padding(25, 0px, 0); + .p-t-@{key} { + padding-top: @size !important; + } + + .p-b-@{key} { + padding-bottom: @size !important; + } + .p-l-@{key} { + padding-left: @size !important; + } + + .p-r-@{key} { + padding-right: @size !important; + } + + .padding(@label - 5; @size + 5; @key + 5); +} + +.padding(25, 0px, 0); /* -------------------------------------------------------- Generate Font-Size Classes (8px - 20px) -----------------------------------------------------------*/ -.font-size (@label, @size: 8, @key:10) when (@size =< 20){ - .f-@{key} { - font-size: @size !important; - } - - .font-size(@label - 1; @size + 1; @key + 1); -} +.font-size (@label, @size: 8, @key:10) when (@size =< 20) { + .f-@{key} { + font-size: @size !important; + } -.font-size(20, 8px, 8); + .font-size(@label - 1; @size + 1; @key + 1); +} -.f-inherit { font-size: inherit !important; } +.font-size(20, 8px, 8); +.f-inherit { + font-size: inherit !important; +} /* -------------------------------------------------------- Font Weight -----------------------------------------------------------*/ -.f-300 { font-weight: 300 !important; } -.f-400 { font-weight: 400 !important; } -.f-500 { font-weight: 500 !important; } -.f-700 { font-weight: 700 !important; } - +.f-300 { + font-weight: 300 !important; +} +.f-400 { + font-weight: 400 !important; +} +.f-500 { + font-weight: 500 !important; +} +.f-700 { + font-weight: 700 !important; +} /* -------------------------------------------------------- Position -----------------------------------------------------------*/ -.p-relative { position: relative !important; } -.p-absolute { position: absolute !important; } -.p-fixed { position: fixed !important; } -.p-static { position: static !important; } - +.p-relative { + position: relative !important; +} +.p-absolute { + position: absolute !important; +} +.p-fixed { + position: fixed !important; +} +.p-static { + position: static !important; +} /* -------------------------------------------------------- Overflow -----------------------------------------------------------*/ -.o-hidden { overflow: hidden !important; } -.o-visible { overflow: visible !important; } -.o-auto { overflow: auto !important; } - +.o-hidden { + overflow: hidden !important; +} +.o-visible { + overflow: visible !important; +} +.o-auto { + overflow: auto !important; +} /* -------------------------------------------------------- Display -----------------------------------------------------------*/ -.di-block { display: inline-block !important; } -.d-block { display: block; } +.di-block { + display: inline-block !important; +} +.d-block { + display: block; +} /* -------------------------------------------------------- Background Colors and Colors -----------------------------------------------------------*/ -@array: c-white bg-white @white, c-ace bg-ace @ace, c-black bg-black @black, c-brown bg-brown @brown, c-pink bg-pink @pink, c-red bg-red @red, c-blue bg-blue @blue, c-purple bg-purple @purple, c-deeppurple bg-deeppurple @deeppurple, c-lightblue bg-lightblue @lightblue, c-cyan bg-cyan @cyan, c-teal bg-teal @teal, c-green bg-green @green, c-lightgreen bg-lightgreen @lightgreen, c-lime bg-lime @lime, c-yellow bg-yellow @yellow, c-amber bg-amber @amber, c-orange bg-orange @orange, 
c-deeporange bg-deeporange @deeporange, c-gray bg-gray @gray, c-bluegray bg-bluegray @bluegray, c-indigo bg-indigo @indigo; - -.for(@array); .-each(@value) { - @name: extract(@value, 1); - @name2: extract(@value, 2); - @color: extract(@value, 3); - &.@{name2} { - background-color: @color !important; - } - - &.@{name} { - color: @color !important; - } +@array: c-white bg-white @white, c-ace bg-ace @ace, c-black bg-black @black, c-brown bg-brown @brown, + c-pink bg-pink @pink, c-red bg-red @red, c-blue bg-blue @blue, c-purple bg-purple @purple, + c-deeppurple bg-deeppurple @deeppurple, c-lightblue bg-lightblue @lightblue, c-cyan bg-cyan @cyan, + c-teal bg-teal @teal, c-green bg-green @green, c-lightgreen bg-lightgreen @lightgreen, c-lime bg-lime @lime, + c-yellow bg-yellow @yellow, c-amber bg-amber @amber, c-orange bg-orange @orange, + c-deeporange bg-deeporange @deeporange, c-gray bg-gray @gray, c-bluegray bg-bluegray @bluegray, + c-indigo bg-indigo @indigo; + +.for(@array); +.-each(@value) { + @name: extract(@value, 1); + @name2: extract(@value, 2); + @color: extract(@value, 3); + &.@{name2} { + background-color: @color !important; + } + + &.@{name} { + color: @color !important; + } } - /* -------------------------------------------------------- Background Colors -----------------------------------------------------------*/ -.bg-brand { background-color: @brand-bg; } -.bg-black-trp { background-color: rgba(0,0,0,0.12) !important; } - - +.bg-brand { + background-color: @brand-bg; +} +.bg-black-trp { + background-color: rgba(0, 0, 0, 0.12) !important; +} /* -------------------------------------------------------- Borders -----------------------------------------------------------*/ -.b-0 { border: 0 !important; } - +.b-0 { + border: 0 !important; +} /* -------------------------------------------------------- Width -----------------------------------------------------------*/ -.w-100 { width: 100% !important; } -.w-50 { width: 50% !important; } -.w-25 { width: 25% !important; } - +.w-100 { + width: 100% !important; +} +.w-50 { + width: 50% !important; +} +.w-25 { + width: 25% !important; +} /* -------------------------------------------------------- Border Radius -----------------------------------------------------------*/ -.brd-2 { border-radius: 2px; } - +.brd-2 { + border-radius: 2px; +} /* -------------------------------------------------------- Alignment -----------------------------------------------------------*/ -.va-top { vertical-align: top; } \ No newline at end of file +.va-top { + vertical-align: top; +} + +/* -------------------------------------------------------- + Screen readers +-----------------------------------------------------------*/ +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border-width: 0; +} diff --git a/client/app/assets/less/inc/popover.less b/client/app/assets/less/inc/popover.less index c687a089a2..5fcad7089b 100755 --- a/client/app/assets/less/inc/popover.less +++ b/client/app/assets/less/inc/popover.less @@ -1,7 +1,5 @@ .popover { box-shadow: fade(@redash-gray, 25%) 0px 0px 15px 0px; - color: #000000; - z-index: 1000000001; // So that it can popover a dropdown menu } .popover-title { @@ -21,4 +19,4 @@ p { margin-bottom: 0; } -} +} \ No newline at end of file diff --git a/client/app/assets/less/inc/schema-browser.less b/client/app/assets/less/inc/schema-browser.less index 5c9a71dcd4..f005239758 100644 --- 
a/client/app/assets/less/inc/schema-browser.less +++ b/client/app/assets/less/inc/schema-browser.less @@ -1,99 +1,107 @@ -div.table-name { - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; - cursor: pointer; - padding: 2px 22px 2px 10px; - border-radius: @redash-radius; - position: relative; - height: 22px; - - .copy-to-editor, .info { - display: none; - } - - &:hover { - background: fade(@redash-gray, 10%); - - .copy-to-editor, .info { - display: flex; - } - } -} - .schema-container { height: 100%; z-index: 10; background-color: white; -} -.schema-browser { - overflow: hidden; - border: none; - padding-top: 10px; - position: relative; - height: 100%; - - .schema-loading-state { - display: flex; - align-items: center; - justify-content: center; + .schema-browser { + overflow: hidden; + border: none; + padding-top: 10px; + position: relative; height: 100%; - } - .collapse.in { - background: transparent; - } + .schema-loading-state { + display: flex; + align-items: center; + justify-content: center; + height: 100%; + } - .copy-to-editor, .info { - color: fade(@redash-gray, 90%); - cursor: pointer; - position: absolute; - top: 0; - right: 0; - bottom: 0; - width: 20px; - display: flex; - align-items: center; - justify-content: center; - } + .collapse.in { + background: transparent; + } - .info { - right: 20px - } + .copy-to-editor { + visibility: hidden; + color: fade(@redash-gray, 90%); + width: 20px; + display: flex; + align-items: center; + justify-content: center; + transition: none; + } - .table-open { - padding: 0 22px 0 26px; - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; - position: relative; - height: 18px; + .schema-list-item { + display: flex; + border-radius: @redash-radius; + height: 22px; + + .table-name { + flex: 1; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + cursor: pointer; + padding: 2px 22px 2px 10px; + } - .copy-to-editor, .info { - display: none; - } + &:hover, + &:focus, + &:focus-within { + background: fade(@redash-gray, 10%); - &:hover { - background: fade(@redash-gray, 10%); + .copy-to-editor { + visibility: visible; + } + } + } - .copy-to-editor, .info { + .table-open { + .table-open-item { display: flex; + height: 18px; + width: calc(100% - 22px); + padding-left: 22px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + transition: none; + + div:first-child { + flex: 1; + } + + .column-type { + color: fade(@text-color, 80%); + font-size: 10px; + margin-left: 2px; + text-transform: uppercase; + } + + &:hover, + &:focus, + &:focus-within { + background: fade(@redash-gray, 10%); + + .copy-to-editor { + visibility: visible; + } + } } } } -} -.schema-control { - display: flex; - flex-wrap: nowrap; - padding: 0; + .schema-control { + display: flex; + flex-wrap: nowrap; + padding: 0; - .ant-btn { - height: auto; + .ant-btn { + height: auto; + } } -} -.parameter-label { - display: block; + .parameter-label { + display: block; + } } diff --git a/client/app/assets/less/inc/table.less b/client/app/assets/less/inc/table.less index 7a43a6f9e6..b610686925 100755 --- a/client/app/assets/less/inc/table.less +++ b/client/app/assets/less/inc/table.less @@ -1,149 +1,158 @@ .table { - margin-bottom: 0; - - th.sortable-column { - cursor: pointer; - } - - &:not(.table-striped) > thead > tr > th { - background-color: #FAFAFA; - } - - [class*="bg-"] { - & > tr > th { - color: #fff; - border-bottom: 0; - background: transparent !important; - } - - & + tbody > tr:first-child > td { - border-top: 0; - } 
- } - - & > thead > tr > th { - vertical-align: middle; - font-weight: 500; - color: #333; - border-width: 1px; - text-transform: uppercase; - padding: 15px 10px; + margin-bottom: 0; + + th.sortable-column { + cursor: pointer; + } + + &:not(.table-striped) > thead > tr > th { + background-color: #fafafa; + } + + [class*="bg-"] { + & > tr > th { + color: #fff; + border-bottom: 0; + background: transparent !important; } - - & > thead > tr, - & > tbody > tr, - & > tfoot > tr { - - & > th, & > td { - - &:first-child { - padding-left: 30px; - } - - &:last-child { - padding-right: 30px; - } - - } + + & + tbody > tr:first-child > td { + border-top: 0; } - - tbody > tr:last-child > td { - padding-bottom: 20px; + } + + & > thead > tr > th { + vertical-align: middle; + font-weight: 500; + color: #333; + border-width: 1px; + text-transform: uppercase; + padding: 15px 10px; + } + + & > thead > tr, + & > tbody > tr, + & > tfoot > tr { + & > th, + & > td { + &:first-child { + padding-left: 30px; + } + + &:last-child { + padding-right: 30px; + } } + } + + tbody > tr:last-child > td { + padding-bottom: 20px; + } } .table-bordered { - border: 0; - - & > tbody > tr { - & > td, & > th { - border-bottom: 0; - border-left: 0; - - &:last-child { - border-right: 0; - } - } + border: 0; + + & > tbody > tr { + & > td, + & > th { + border-bottom: 0; + border-left: 0; + + &:last-child { + border-right: 0; + } } - - & > thead > tr > th { - border-left: 0; - - &:last-child { - border-right: 0; - } + } + + & > thead > tr > th { + border-left: 0; + + &:last-child { + border-right: 0; } + } } .table-vmiddle { - td { - vertical-align: middle !important; - } + td { + vertical-align: middle !important; + } } .table-responsive { - border: 0; + border: 0; } -.tile .table { - - & > thead:not([class*="bg-"]) > tr > th { - border-top: 1px solid @table-border-color; - - } +.tile .table { + & > thead:not([class*="bg-"]) > tr > th { + border-top: 1px solid @table-border-color; + } } .table-hover > tbody > tr:hover { - background-color: #f4f4f4; + background-color: #f4f4f4; } .table-data { - tbody > tr > td { - padding-top: 5px !important; - } - - .btn-favourite, .btn-archive { - font-size: 15px; - } + thead > tr > th { + white-space: nowrap; + } + + tbody > tr > td { + padding-top: 5px !important; + } + + .btn-favorite, + .btn-archive { + font-size: 15px; + } } .table-main-title { - font-weight: 500; - line-height: 1.7 !important; + font-weight: 500; + line-height: 1.7 !important; } -.btn-favourite { - color: #d4d4d4; - transition: all .25s ease-in-out; - - &:hover, &:focus { - color: @yellow-darker; - cursor: pointer; - } - +.btn-favorite { + color: #d4d4d4; + transition: all 0.25s ease-in-out; + + .fa-star { + color: @yellow-darker; + } + + &:hover, + &:focus { + color: @yellow-darker; + cursor: pointer; + .fa-star { - color: @yellow-darker; + filter: saturate(75%); + opacity: 0.75; } + } } .btn-archive { - color: #d4d4d4; - transition: all .25s ease-in-out; - - &:hover, &:focus { - color: @gray-light; - } - - .fa-archive { - color: @gray-light; - } + color: #d4d4d4; + transition: all 0.25s ease-in-out; + + &:hover, + &:focus { + color: @gray-light; + } + + .fa-archive { + color: @gray-light; + } } .table > thead > tr > th { - text-transform: none; + text-transform: none; } .table-data .label-tag { - display: inline-block; - max-width: 135px; - } \ No newline at end of file + display: inline-block; + max-width: 135px; +} diff --git a/client/app/assets/less/redash/query.less b/client/app/assets/less/redash/query.less index 
ab84515856..57bccbc8c4 100644 --- a/client/app/assets/less/redash/query.less +++ b/client/app/assets/less/redash/query.less @@ -90,6 +90,23 @@ body.fixed-layout { .embed__vis { display: flex; flex-flow: column; + height: calc(~'100vh - 25px'); + + > .embed-heading { + flex: 0 0 auto; + } + + > .query__vis { + flex: 1 1 auto; + + .chart-visualization-container, .visualization-renderer-wrapper, .visualization-renderer { + height: 100% + } + } + + > .tile__bottom-control { + flex: 0 0 auto; + } width: 100%; } @@ -127,11 +144,13 @@ body.fixed-layout { } } -a.label-tag { +.label-tag { background: fade(@redash-gray, 15%); color: darken(@redash-gray, 15%); - &:hover { + &:hover, + &:focus, + &:active { color: darken(@redash-gray, 15%); background: fade(@redash-gray, 25%); } @@ -141,6 +160,7 @@ a.label-tag { display: flex; flex-direction: column; flex-grow: 1; + position: relative; } .query-fullscreen { @@ -203,6 +223,7 @@ a.label-tag { } .editor__left__schema { + min-height: 120px; flex-grow: 1; display: flex; flex-direction: column; @@ -493,17 +514,3 @@ nav .rg-bottom { padding-right: 0; } } - -.ui-select-choices-row .info { - display: none; -} - -.ui-select-choices-row { - &:hover { - .info { - cursor: pointer; - width: 20px; - display: inline; - } - } -} diff --git a/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.jsx b/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.jsx index 2f068e1279..a1550f60dc 100644 --- a/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.jsx +++ b/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.jsx @@ -1,165 +1,198 @@ -import { first } from "lodash"; -import React, { useState } from "react"; -import Button from "antd/lib/button"; +import React, { useMemo } from "react"; +import { first, includes } from "lodash"; import Menu from "antd/lib/menu"; -import Icon from "antd/lib/icon"; +import Link from "@/components/Link"; +import PlainButton from "@/components/PlainButton"; import HelpTrigger from "@/components/HelpTrigger"; import CreateDashboardDialog from "@/components/dashboards/CreateDashboardDialog"; +import { useCurrentRoute } from "@/components/ApplicationArea/Router"; import { Auth, currentUser } from "@/services/auth"; import settingsMenu from "@/services/settingsMenu"; import logoUrl from "@/assets/images/redash_icon_small.png"; +import DesktopOutlinedIcon from "@ant-design/icons/DesktopOutlined"; +import CodeOutlinedIcon from "@ant-design/icons/CodeOutlined"; +import AlertOutlinedIcon from "@ant-design/icons/AlertOutlined"; +import PlusOutlinedIcon from "@ant-design/icons/PlusOutlined"; +import QuestionCircleOutlinedIcon from "@ant-design/icons/QuestionCircleOutlined"; +import SettingOutlinedIcon from "@ant-design/icons/SettingOutlined"; import VersionInfo from "./VersionInfo"; + import "./DesktopNavbar.less"; -function NavbarSection({ inlineCollapsed, children, ...props }) { +function NavbarSection({ children, ...props }) { return ( - + {children} ); } -export default function DesktopNavbar() { - const [collapsed, setCollapsed] = useState(true); +function useNavbarActiveState() { + const currentRoute = useCurrentRoute(); + + return useMemo( + () => ({ + dashboards: includes( + [ + "Dashboards.List", + "Dashboards.Favorites", + "Dashboards.My", + "Dashboards.ViewOrEdit", + "Dashboards.LegacyViewOrEdit", + ], + currentRoute.id + ), + queries: includes( + [ + "Queries.List", + "Queries.Favorites", + "Queries.Archived", + "Queries.My", + "Queries.View", + "Queries.New", + 
"Queries.Edit", + ], + currentRoute.id + ), + dataSources: includes(["DataSources.List"], currentRoute.id), + alerts: includes(["Alerts.List", "Alerts.New", "Alerts.View", "Alerts.Edit"], currentRoute.id), + }), + [currentRoute.id] + ); +} +export default function DesktopNavbar() { const firstSettingsTab = first(settingsMenu.getAvailableItems()); + const activeState = useNavbarActiveState(); + const canCreateQuery = currentUser.hasPermission("create_query"); const canCreateDashboard = currentUser.hasPermission("create_dashboard"); const canCreateAlert = currentUser.hasPermission("list_alerts"); return ( -
+ ); } diff --git a/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.less b/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.less index 7cafb94008..4b8bedda7f 100644 --- a/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.less +++ b/client/app/components/ApplicationArea/ApplicationLayout/DesktopNavbar.less @@ -1,12 +1,17 @@ @backgroundColor: #001529; @dividerColor: rgba(255, 255, 255, 0.5); @textColor: rgba(255, 255, 255, 0.75); +@brandColor: #ff7964; // Redash logo color +@activeItemColor: @brandColor; +@iconSize: 26px; .desktop-navbar { background: @backgroundColor; display: flex; flex-direction: column; height: 100%; + width: 80px; + overflow: hidden; &-spacer { flex: 1 1 auto; @@ -21,12 +26,6 @@ height: 40px; transition: all 270ms; } - - &.ant-menu-inline-collapsed { - img { - height: 20px; - } - } } .help-trigger { @@ -34,33 +33,38 @@ } .ant-menu { - &:not(.ant-menu-inline-collapsed) { - width: 170px; - } - - &.ant-menu-inline-collapsed > .ant-menu-submenu-title span img + span, - &.ant-menu-inline-collapsed > .ant-menu-item i + span { - display: inline-block; - max-width: 0; - opacity: 0; - } - - .ant-menu-item-divider { - background: @dividerColor; - } - .ant-menu-item, .ant-menu-submenu { font-weight: 500; color: @textColor; + &.navbar-active-item { + box-shadow: inset 3px 0 0 @activeItemColor; + + .anticon { + color: @activeItemColor; + } + } + &.ant-menu-submenu-open, &.ant-menu-submenu-active, &:hover, - &:active { + &:active, + &:focus, + &:focus-within { color: #fff; } + .anticon { + font-size: @iconSize; + margin: 0; + } + + .desktop-navbar-label { + margin-top: 4px; + font-size: 11px; + } + a, span, .anticon { @@ -71,21 +75,33 @@ .ant-menu-submenu-arrow { display: none; } - } - .ant-btn.desktop-navbar-collapse-button { - background-color: @backgroundColor; - border: 0; - border-radius: 0; - color: @textColor; + .ant-menu-item, + .ant-menu-submenu { + padding: 0; + height: 60px; + display: flex; + align-items: center; + flex-direction: column; + justify-content: center; + } - &:hover, - &:active { - color: #fff; + .ant-menu-submenu-title { + width: 100%; + padding: 0; } - &:after { - animation: 0s !important; + a, + &.ant-menu-vertical > .ant-menu-submenu > .ant-menu-submenu-title, + .ant-menu-submenu-title { + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + line-height: normal; + height: auto; + background: none; + color: inherit; } } @@ -99,37 +115,8 @@ .profile__image_thumb { margin: 0; vertical-align: middle; - } - - .profile__image_thumb + span { - flex: 1 1 auto; - overflow: hidden; - white-space: nowrap; - text-overflow: ellipsis; - - margin-left: 10px; - vertical-align: middle; - display: inline-block; - - // styles from Antd - opacity: 1; - transition: opacity 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), - margin-left 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), width 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); - } - } - - &.ant-menu-inline-collapsed { - .ant-menu-submenu-title { - padding-left: 16px !important; - padding-right: 16px !important; - } - - .desktop-navbar-profile-menu-title { - .profile__image_thumb + span { - opacity: 0; - max-width: 0; - margin-left: 0; - } + width: @iconSize; + height: @iconSize; } } } @@ -146,7 +133,9 @@ color: @textColor; &:hover, - &:active { + &:active, + &:focus, + &:focus-within { color: #fff; } @@ -171,7 +160,9 @@ color: rgba(255, 255, 255, 0.8); &:hover, - &:active { + &:active, + &:focus, + &:focus-within { color: rgba(255, 
255, 255, 1); } } diff --git a/client/app/components/ApplicationArea/ApplicationLayout/MobileNavbar.jsx b/client/app/components/ApplicationArea/ApplicationLayout/MobileNavbar.jsx index be5e2f0be5..78eae21064 100644 --- a/client/app/components/ApplicationArea/ApplicationLayout/MobileNavbar.jsx +++ b/client/app/components/ApplicationArea/ApplicationLayout/MobileNavbar.jsx @@ -2,9 +2,10 @@ import { first } from "lodash"; import React from "react"; import PropTypes from "prop-types"; import Button from "antd/lib/button"; -import Icon from "antd/lib/icon"; +import MenuOutlinedIcon from "@ant-design/icons/MenuOutlined"; import Dropdown from "antd/lib/dropdown"; import Menu from "antd/lib/menu"; +import Link from "@/components/Link"; import { Auth, currentUser } from "@/services/auth"; import settingsMenu from "@/services/settingsMenu"; import logoUrl from "@/assets/images/redash_icon_small.png"; @@ -17,9 +18,9 @@ export default function MobileNavbar({ getPopupContainer }) { return (
{currentUser.hasPermission("list_dashboards") && ( - Dashboards + Dashboards )} {currentUser.hasPermission("view_query") && ( - Queries + Queries )} {currentUser.hasPermission("list_alerts") && ( - Alerts + Alerts )} - Edit Profile + Edit Profile {firstSettingsTab && ( - Settings + Settings )} {currentUser.hasPermission("super_admin") && ( - System Status + System Status )} {currentUser.hasPermission("super_admin") && } {/* eslint-disable-next-line react/jsx-no-target-blank */} - + Help - + Auth.logout()}> Log out @@ -70,7 +71,7 @@ export default function MobileNavbar({ getPopupContainer }) {
}> diff --git a/client/app/components/ApplicationArea/ApplicationLayout/VersionInfo.jsx b/client/app/components/ApplicationArea/ApplicationLayout/VersionInfo.jsx index 832780f3b6..e655e7f9fb 100644 --- a/client/app/components/ApplicationArea/ApplicationLayout/VersionInfo.jsx +++ b/client/app/components/ApplicationArea/ApplicationLayout/VersionInfo.jsx @@ -1,4 +1,5 @@ import React from "react"; +import Link from "@/components/Link"; import { clientConfig, currentUser } from "@/services/auth"; import frontendVersion from "@/version.json"; @@ -12,10 +13,10 @@ export default function VersionInfo() { {clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
{/* eslint-disable react/jsx-no-target-blank */} - - Update Available - - + + Update Available