diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 39fa60c83c..b92b85b208 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -10,7 +10,6 @@ on: jobs: Run-CSPM-GCP-Tests: name: CIS GCP integration test - if: false runs-on: ubuntu-22.04 timeout-minutes: 60 permissions: @@ -68,7 +67,17 @@ jobs: USE_K8S: false run: | poetry install - poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + sleep 5 + curl -X PUT "http://localhost:9200/*cloud_security_posture.findings*/_settings" -H "Content-Type: application/json" -d '{ + "index.mapping.total_fields.limit": 2000 + }' + poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 + + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + if: failure() + with: + limit-access-to-actor: true - name: Print cloudbeat logs if: always() diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index ba7dd16106..bc4659f2e6 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -14,13 +14,16 @@ on: required: true description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" default: "8.10.0" + type: string ess-region: required: true description: "Elastic Cloud deployment region" default: "gcp-us-west2" + type: string docker-image-override: required: false description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + type: string run-sanity-tests: description: "Run sanity tests after provision" default: false @@ -33,6 +36,45 @@ on: type: string description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key." required: false + workflow_call: + inputs: + deployment_name: + description: Name of the deployment to create + type: string + required: true + elk-stack-version: + required: true + description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" + default: "8.10.0" + type: string + ess-region: + required: true + description: "Elastic Cloud deployment region" + default: "gcp-us-west2" + type: string + docker-image-override: + required: false + description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + type: string + run-sanity-tests: + description: "Run sanity tests after provision" + default: false + type: boolean + cleanup-env: + description: "Cleanup resources after provision" + default: false + type: boolean + ec-api-key: + type: string + description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key." 
+        required: false
+    outputs:
+      s3-bucket:
+        description: "Terraform state s3 bucket folder"
+        value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }}
+      cnvm-stack-name:
+        description: "AWS CNVM integration stack name"
+        value: ${{ jobs.Deploy.outputs.aws-cnvm-stack-name }}

 env:
   AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
@@ -52,17 +94,20 @@ jobs:
       run:
         working-directory: ${{ env.WORKING_DIR }}
     env:
-      TF_VAR_stack_version: ${{ github.event.inputs.elk-stack-version }}
-      TF_VAR_ess_region: ${{ github.event.inputs.ess-region }}
-      DEPLOYMENT_NAME: ${{ github.event.inputs.deployment_name }}
+      TF_VAR_stack_version: ${{ inputs.elk-stack-version }}
+      TF_VAR_ess_region: ${{ inputs.ess-region }}
+      DEPLOYMENT_NAME: ${{ inputs.deployment_name }}
       S3_BASE_BUCKET: "s3://tf-state-bucket-test-infra"
-      DOCKER_IMAGE_OVERRIDE: ${{ github.event.inputs.docker-image-override }}
-      STACK_VERSION: ${{ github.event.inputs.elk-stack-version }}
-      CNVM_STACK_NAME: "${{ github.event.inputs.deployment_name }}-cnvm-sanity-test-stack"
+      DOCKER_IMAGE_OVERRIDE: ${{ inputs.docker-image-override }}
+      STACK_VERSION: ${{ inputs.elk-stack-version }}
+      CNVM_STACK_NAME: "${{ inputs.deployment_name }}-cnvm-sanity-test-stack"
     # Add "id-token" with the intended permissions.
     permissions:
       contents: 'read'
       id-token: 'write'
+    outputs:
+      deploy-s3-bucket: ${{ steps.upload-state.outputs.s3-bucket-folder }}
+      aws-cnvm-stack-name: ${{ steps.upload-state.outputs.aws-cnvm-stack }}
     steps:
       - name: Check out the repo
         uses: actions/checkout@v4
@@ -73,7 +118,7 @@

       - name: Check Deployment Name
         run: |
-          deployment_name="${{ github.event.inputs.deployment_name }}"
+          deployment_name="${{ inputs.deployment_name }}"

           # Check length
           if [ ${#deployment_name} -gt 20 ]; then
@@ -88,7 +133,7 @@
           fi

       - name: Mask Sensitive Data
-        if: github.event.inputs.ec-api-key != ''
+        if: inputs.ec-api-key != ''
         run: |
           ec_api_key=$(jq -r '.inputs["ec-api-key"]' $GITHUB_EVENT_PATH)
           echo "::add-mask::$ec_api_key"
@@ -178,6 +223,7 @@
           echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV

       - name: Upload tf state
+        id: upload-state
         if: always()
         env:
           S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
@@ -185,6 +231,8 @@
           aws s3 cp "./terraform.tfstate" "${{ env.S3_BUCKET }}/terraform.tfstate"
           aws s3 cp "${{ env.EC2_CSPM_KEY }}" "${{ env.S3_BUCKET }}/cspm.pem"
           aws s3 cp "${{ env.EC2_KSPM_KEY }}" "${{ env.S3_BUCKET }}/kspm.pem"
+          echo "s3-bucket-folder=${{ env.S3_BUCKET }}" >> $GITHUB_OUTPUT
+          echo "aws-cnvm-stack=${{ env.CNVM_STACK_NAME }}" >> $GITHUB_OUTPUT

       - name: Summary
         if: success()
@@ -296,6 +344,7 @@
           aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_d4c.yaml" "${{ env.S3_BUCKET }}/kspm_d4c.yaml"
           aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_eks.yaml" "${{ env.S3_BUCKET }}/kspm_eks.yaml"
           aws s3 cp "${{ env.FLEET_API_DIR}}/cspm-linux.sh" "${{ env.S3_BUCKET }}/cspm-linux.sh"
+          aws s3 cp "${{ env.FLEET_API_DIR}}/state_data.json" "${{ env.S3_BUCKET }}/state_data.json"

       - name: Wait for agents to enroll
         id: wait-for-agents
@@ -304,13 +353,13 @@
           poetry run python src/agents_enrolled.py

       - name: Run Sanity checks
-        if: ${{ success() && github.event.inputs.run-sanity-tests == 'true' }}
+        if: ${{ success() && inputs.run-sanity-tests == true }}
         working-directory: ./tests
         run: |
           poetry install
           poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4

       - name: Cleanup Environment
-        if: github.event.inputs.cleanup-env == 'true'
+        if: inputs.cleanup-env == true
         run: |
           just delete-cloud-env ${{
env.DEPLOYMENT_NAME }} '' "false" diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml new file mode 100644 index 0000000000..3cbdb6aa4c --- /dev/null +++ b/.github/workflows/upgrade-environment.yml @@ -0,0 +1,230 @@ +name: Test Upgrade Environment +run-name: Creating ${{ github.event.inputs.deployment_name }} by @${{ github.actor }} + +on: + # Ability to execute on demand + workflow_dispatch: + inputs: + deployment_name: + type: string + description: | + Name with letters, numbers, hyphens; start with a letter. Max 20 chars. e.g., 'my-env-123' + required: true + target-elk-stack-version: + required: true + description: "Target version of the ELK stack: For BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" + default: "8.11.0" + type: string + docker-image-override: + required: false + description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: "eu-west-1" + WORKING_DIR: deploy/test-environments + FLEET_API_DIR: fleet_api/src + TF_VAR_stack_version: ${{ inputs.target-elk-stack-version }} + TF_VAR_ess_region: gcp-us-west2 + TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }} + DOCKER_IMAGE: ${{ inputs.docker-image-override }} + +jobs: + init: + runs-on: ubuntu-20.04 + outputs: + stack-version: ${{ steps.set-previous-version.outputs.PREVIOUS_VERSION }} + ess-region: ${{ env.TF_VAR_ess_region }} + steps: + - name: Set Previous Version + id: set-previous-version + run: | + VERSION="${{ inputs.target-elk-stack-version }}" + + # Extract the major and minor versions + MAJOR_VERSION=$(echo $VERSION | cut -d'.' -f1) + MINOR_VERSION=$(echo $VERSION | cut -d'.' 
-f2) + + # Calculate the previous version (assuming it's always X.(Y-1)) + PREVIOUS_VERSION="$MAJOR_VERSION.$((MINOR_VERSION - 1))" + + URL="https://snapshots.elastic.co/latest/$PREVIOUS_VERSION.json" + + # Use curl to fetch the JSON data + JSON_RESPONSE=$(curl -s "$URL") + + # Get latest snapshot version + SNAPSHOT_VERSION=$(echo "$JSON_RESPONSE" | jq -r '.version') + + # Split the version into major, minor, and patch parts + IFS='.-' read -ra PARTS <<< "$SNAPSHOT_VERSION" + MAJOR="${PARTS[0]}" + MINOR="${PARTS[1]}" + PATCH="${PARTS[2]}" + + # Decrement the patch version by 1 + PATCH=$((PATCH - 1)) + + # Format the previous version + PREVIOUS_VERSION="$MAJOR.$MINOR.$PATCH" + echo "Current Version: $VERSION" + echo "Latest Released Version: $PREVIOUS_VERSION" + echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" >> $GITHUB_OUTPUT + deploy: + uses: ./.github/workflows/test-environment.yml + needs: init + with: + deployment_name: ${{ inputs.deployment_name }} + elk-stack-version: ${{ needs.init.outputs.stack-version }} + ess-region: ${{ needs.init.outputs.ess-region }} + run-sanity-tests: true + secrets: inherit + upgrade: + runs-on: ubuntu-20.04 + needs: [init, deploy] + timeout-minutes: 120 + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + permissions: + contents: 'read' + id-token: 'write' + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Init Hermit + run: ./bin/hermit env -r >> $GITHUB_ENV + working-directory: ./ + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + poetry --version + + - name: Install Fleet API dependencies + id: fleet-api-deps + working-directory: ${{ env.WORKING_DIR }}/fleet_api + run: | + poetry install + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::704479110758:role/Developer_eks + role-session-name: github-ci + aws-region: ${{ env.AWS_REGION }} + + - name: Download tf state data + env: + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" + + - name: Terraform Init + run: terraform init + + - name: Terraform Validate + run: terraform validate + + - name: Update ELK stack version + id: apply + if: success() + run: | + terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" + + - name: Set Environment Output + id: env-output + run: | + echo "KIBANA_URL=$(terraform output -raw kibana_url)" >> $GITHUB_ENV + echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV + echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV + + export ES_PASSWORD=$(terraform output -raw elasticsearch_password) + echo "::add-mask::$ES_PASSWORD" + echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV + + export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) + echo "::add-mask::$EC2_CSPM" + echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV + + export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) + echo "::add-mask::$EC2_KSPM" + echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV + + export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) + echo "::add-mask::$EC2_CSPM_KEY" + echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV + + export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) + echo "::add-mask::$EC2_KSPM_KEY" + echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV + + export 
KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) + echo "::add-mask::$KSPM_PUBLIC_IP" + echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV + + export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) + echo "::add-mask::$CSPM_PUBLIC_IP" + echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV + + - name: Run Sanity checks + if: success() + working-directory: ./tests + env: + USE_K8S: false + STACK_VERSION: ${{ needs.init.outputs.stack-version }} + run: | + poetry install + poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + + - name: Set Docker Image version + if: ${{ ! inputs.docker-image-override }} + env: + VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.target-elk-stack-version }}' + run: | + echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV + + - name: Download Integrations data + env: + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" + aws s3 cp "${{ env.S3_BUCKET }}/state_data.json" "${{ env.FLEET_API_DIR }}/state_data.json" + + - name: Upgrade KSPM Unmanaged agent + run: | + chmod 600 ${{ env.EC2_KSPM_KEY }} + # Update image + ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" + + - name: Upgrade KSPM EKS agent + run: | + aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ + --name $(terraform output -raw deployment_name) --alias eks-config + kubectl config use-context eks-config + kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} + + - name: Upgrade Linux agents + working-directory: ${{ env.WORKING_DIR }}/${{ env.FLEET_API_DIR }} + env: + CNVM_STACK_NAME: ${{ needs.deploy.outputs.cnvm-stack-name }} + STACK_VERSION: ${{ inputs.target-elk-stack-version }} + run: | + poetry run python upgrade_agents.py + + - name: Run Upgrade Sanity checks + if: success() + working-directory: ./tests + env: + STACK_VERSION: ${{ inputs.target-elk-stack-version }} + USE_K8S: false + run: | + poetry install + poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 diff --git a/deploy/gcp/cloudbeat-gcp.yml b/deploy/gcp/cloudbeat-gcp.yml index 13b3d79bc0..dad778a685 100644 --- a/deploy/gcp/cloudbeat-gcp.yml +++ b/deploy/gcp/cloudbeat-gcp.yml @@ -11,7 +11,7 @@ cloudbeat: credentials_file_path: ${GOOGLE_APPLICATION_CREDENTIALS:""} type: cloudbeat/cis_gcp # Defines how often an event is sent to the output - period: 30s + period: 60s evaluator: decision_logs: false # =================================== Kibana =================================== diff --git a/deploy/test-environments/fleet_api/pyproject.toml b/deploy/test-environments/fleet_api/pyproject.toml index 64b2f7f5d4..64a172c060 100644 --- a/deploy/test-environments/fleet_api/pyproject.toml +++ b/deploy/test-environments/fleet_api/pyproject.toml @@ -12,6 +12,8 @@ munch = "^3.0.0" loguru = "^0.7.0" jinja2 = "^3.1.2" ruamel-yaml = "^0.17.31" +packaging = "^23.1" +boto3 = "^1.28.65" [build-system] diff --git a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py index 6c3a4cd00d..33740bbea7 100644 --- a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py +++ b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py @@ -2,6 +2,7 @@ This module contains API calls related to the agent policy 
API. """ +from typing import Optional from munch import Munch, munchify from loguru import logger from api.base_call_api import APICallException, perform_api_call @@ -40,6 +41,37 @@ def create_agent_policy(cfg: Munch, json_policy: dict) -> str: raise api_ex +def update_agent_policy(cfg: Munch, policy_id, json_policy: dict): + """This function updates an agent policy + + Args: + cfg (Munch): Config object containing authentication data. + policy_id (str): Policy id to be updated. + json_policy (dict): Data for the agent policy to be updated. + + Raises: + APICallException: If the API call fails or returns a non-200 status code. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agent_policies/{policy_id}" + + try: + perform_api_call( + method="PUT", + url=url, + auth=cfg.auth, + params={"json": json_policy}, + ) + logger.info( + f"Agent policy '{policy_id}' for integration '{json_policy.get('name', '')}' has been updated", + ) + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + raise api_ex + + def delete_agent_policy(cfg: Munch, agent_policy_id: str): """This function deletes an agent policy @@ -164,3 +196,46 @@ def unenroll_agents_from_policy(cfg: Munch, agents: list): f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", ) return + + +def create_agent_download_source( + cfg: Munch, + name: str, + host: str, + is_default: bool = False, +) -> Optional[str]: + """ + Create a new agent download source using the Kibana Fleet API. + + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + name (str): The name of the agent download source. + host (str): The host URL where agents will download packages from. + is_default (bool, optional): Whether this source should be the default. Default is False. + + Returns: + str: The ID of the newly created agent download source, + or None if the ID cannot be retrieved. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agent_download_sources" + json_data = { + "name": name, + "host": host, + "is_default": is_default, + } + + try: + response = perform_api_call( + method="POST", + url=url, + auth=cfg.auth, + params={"json": json_data}, + ) + source_id = response.get("item", {}).get("id") + return source_id + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + return None diff --git a/deploy/test-environments/fleet_api/src/api/base_call_api.py b/deploy/test-environments/fleet_api/src/api/base_call_api.py index 60af1a082e..1479251d87 100644 --- a/deploy/test-environments/fleet_api/src/api/base_call_api.py +++ b/deploy/test-environments/fleet_api/src/api/base_call_api.py @@ -33,13 +33,15 @@ def __init__(self, status_code, response_text): self.response_text = response_text -def perform_api_call(method, url, headers=None, auth=None, params=None): +def perform_api_call(method, url, return_json=True, headers=None, auth=None, params=None): """ Perform an API call using the provided parameters. Args: method (str): The HTTP method for the API call (e.g., 'GET', 'POST', 'PUT', 'DELETE'). url (str): The URL of the API endpoint. + return_json (bool, optional): Indicates whether the function should return + JSON data (default is True). headers (dict, optional): The headers to be included in the API request. If not provided, default headers will be used. 
        auth (tuple or None, optional): The authentication tuple (username, password)
@@ -68,4 +70,6 @@ def perform_api_call(method, url, headers=None, auth=None, params=None):
     if response.status_code != 200:
         raise APICallException(response.status_code, response.text)

+    if not return_json:
+        return response.content
     return response.json()
diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py
index 5bb5df0e98..d2c61cba0d 100644
--- a/deploy/test-environments/fleet_api/src/api/common_api.py
+++ b/deploy/test-environments/fleet_api/src/api/common_api.py
@@ -1,13 +1,20 @@
 """
 This module contains API calls related to Fleet settings
 """
+import time
 import codecs
+from typing import Dict, Any, List
 from munch import Munch, munchify
 from loguru import logger
 from api.base_call_api import APICallException, perform_api_call
-from utils import replace_image_field, add_capabilities
+from utils import (
+    replace_image_field,
+    add_capabilities,
+    add_tags,
+)

 AGENT_ARTIFACT_SUFFIX = "/downloads/beats/elastic-agent"
+AGENT_ARTIFACT_SUFFIX_SHORT = "/downloads/"
 STAGING_ARTIFACTORY_URL = "https://staging.elastic.co/"
 SNAPSHOT_ARTIFACTORY_URL = "https://snapshots.elastic.co/"
@@ -116,6 +123,40 @@ def create_kubernetes_manifest(cfg: Munch, params: Munch):
     return


+def get_cnvm_template(url: str, template_path: str, cnvm_tags: str):
+    """
+    Download a CloudFormation template from a specified URL,
+    add custom tags to it, and save it to a file.
+
+    Args:
+        url (str): The URL to download the CloudFormation template.
+        template_path (str): The file path where the modified template will be saved.
+        cnvm_tags (str): Custom tags to be added to the template in the format "key1=value1 key2=value2 ...".
+
+    Returns:
+        None. If the API call fails, the error is logged and no template file is written.
+    """
+    try:
+        template_yaml = perform_api_call(
+            method="GET",
+            url=url,
+            return_json=False,
+        )
+        template_yaml = add_tags(tags=cnvm_tags, yaml_content=template_yaml)
+
+        with codecs.open(template_path, "w", encoding="utf-8") as cnvm_yaml:
+            cnvm_yaml.write(template_yaml)
+        logger.info(f"CNVM template is available at: '{template_path}'")
+    except APICallException as api_ex:
+        logger.error(
+            f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}",
+        )
+        return
+
+
 def get_build_info(version: str) -> str:
     """
     Retrieve the build ID for a specific version of Elastic.
@@ -149,26 +190,31 @@ def get_build_info(version: str) -> str:
     return ""


-def get_artifact_server(version: str) -> str:
+def get_artifact_server(version: str, is_short_url: bool = False) -> str:
     """
-    Retrieve the artifact server for a specific version of Elastic.
+    Retrieve the artifact server URL for a specific version of Elastic.

     Args:
         version (str): The version of Elastic.
+        is_short_url (bool, optional): Indicates whether to use the short artifact URL.
+            Defaults to False.

     Returns:
-        str: The artifact server of the specified version.
+        str: The artifact server URL for the specified Elastic version.

     Raises:
         APICallException: If the API call to retrieve the artifact server fails.
""" - if is_snapshot(version): url = SNAPSHOT_ARTIFACTORY_URL else: url = STAGING_ARTIFACTORY_URL - return url + get_build_info(version) + AGENT_ARTIFACT_SUFFIX + artifacts_suffix = AGENT_ARTIFACT_SUFFIX + if is_short_url: + artifacts_suffix = AGENT_ARTIFACT_SUFFIX_SHORT + + return url + get_build_info(version) + artifacts_suffix def is_snapshot(version: str) -> bool: @@ -256,6 +302,48 @@ def get_package_version( return None +def get_package( + cfg: Munch, + package_name: str = "cloud_security_posture", + is_full: bool = True, + prerelease: bool = False, +) -> Dict[str, Any]: + """ + Retrieve package information from the Elastic Fleet Server API. + + Args: + cfg (Munch): Configuration data. + package_name (str, optional): The name of the package to retrieve. + Default is "cloud_security_posture". + is_full (bool, optional): Whether to retrieve full package information. Default is True. + prerelease (bool, optional): Whether to include prerelease versions. Default is False. + + Returns: + Dict[str, Any]: A dictionary containing the package information + or an empty dictionary if the API call fails. + """ + url = f"{cfg.kibana_url}/api/fleet/epm/packages/{package_name}" + + request_params = { + "full": is_full, + "prerelease": prerelease, + } + + try: + response = perform_api_call( + method="GET", + url=url, + auth=cfg.auth, + params={"params": request_params}, + ) + return response.get("response", {}) + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + return {} + + def update_package_version(cfg: Munch, package_name: str, package_version: str): """ Updates the version of a package. @@ -287,3 +375,118 @@ def update_package_version(cfg: Munch, package_name: str, package_version: str): logger.error( f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", ) + + +def bulk_upgrade_agents(cfg: Munch, agent_ids: List[str], version: str, source_uri: str) -> str: + """ + Upgrade a list of agents to a specified version using the Kibana API. + + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + agent_ids (List[str]): List of agent IDs to upgrade. + version (str): The version to upgrade to. + source_uri (str): The source URI for the agent package. + + Returns: + str: The action ID of the upgrade. + + Raises: + APICallException: If the API call fails with a non-200 status code. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agents/bulk_upgrade" + json_data = { + "agents": agent_ids, + "version": version, + "source_uri": source_uri, + } + logger.info(f"Source URI: {source_uri}") + try: + response = perform_api_call( + method="POST", + url=url, + auth=cfg.auth, + params={"json": json_data}, + ) + action_id = response.get("actionId") + if not action_id: + raise APICallException( + response.status_code, + "API response did not include an actionId", + ) + logger.info(f"Agents '{agent_ids}' upgrade to version '{version}' is started") + logger.info(f"Action status id: {action_id}") + return action_id + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + raise APICallException(api_ex.status_code, api_ex.response_text) from api_ex + + +def get_action_status(cfg: Munch) -> List[dict]: + """ + Retrieve action status for agents using the Kibana API. 
+
+    Args:
+        cfg (Munch): Configuration object containing Kibana URL and authentication details.
+
+    Returns:
+        List[dict]: A list of action status items.
+
+    Raises:
+        APICallException: If the API call fails with a non-200 status code.
+    """
+    url = f"{cfg.kibana_url}/api/fleet/agents/action_status"
+
+    try:
+        response = perform_api_call(
+            method="GET",
+            url=url,
+            auth=cfg.auth,
+        )
+        return response.get("items", [])
+    except APICallException as api_ex:
+        logger.error(
+            f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}",
+        )
+        raise APICallException(api_ex.status_code, api_ex.response_text) from api_ex
+
+
+def wait_for_action_status(
+    cfg: Munch,
+    target_action_id: str,
+    target_type: str,
+    target_status: str,
+    timeout_secs: int = 600,
+):
+    """
+    Wait for a specific action status to match the target criteria.
+
+    Args:
+        cfg (Munch): Configuration object containing Kibana URL and authentication details.
+        target_action_id (str): The action ID to match.
+        target_type (str): The target action type to match.
+        target_status (str): The target status to match.
+        timeout_secs (int): Maximum time to wait in seconds (default is 600 seconds).
+
+    Returns:
+        bool: True if the target criteria are met, False if the timeout is reached.
+
+    Raises:
+        APICallException: If the API call fails with a non-200 status code.
+    """
+    start_time = time.time()
+    while True:
+        action_status = get_action_status(cfg)
+        for item in action_status:
+            if item.get("actionId") == target_action_id:
+                logger.info(f"Type: {item.get('type')}, Status: {item.get('status')}")
+                if item.get("type") == target_type and item.get("status") == target_status:
+                    return True  # Found the target criteria
+
+        if time.time() - start_time >= timeout_secs:
+            logger.error(f"Agent upgrade process reached a timeout of {timeout_secs} seconds.")
+            return False  # Timeout reached
+
+        time.sleep(2)  # Fixed sleep interval of 2 seconds
diff --git a/deploy/test-environments/fleet_api/src/api/package_policy_api.py b/deploy/test-environments/fleet_api/src/api/package_policy_api.py
index a6d915c441..7c0806dea9 100644
--- a/deploy/test-environments/fleet_api/src/api/package_policy_api.py
+++ b/deploy/test-environments/fleet_api/src/api/package_policy_api.py
@@ -163,3 +163,36 @@ def delete_package_policy(cfg: Munch, policy_ids: list):
         logger.error(
             f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}",
         )
+
+
+def get_package_policy_by_id(cfg: Munch, policy_id: str) -> dict:
+    """
+    Retrieve package policy information by its ID.
+
+    Args:
+        cfg (Munch): A configuration object containing Kibana URL, authentication details, etc.
+        policy_id (str): The package policy ID to retrieve.
+
+    Returns:
+        dict: A dictionary containing the package policy information,
+        or an empty dictionary if not found.
+
+    Raises:
+        APICallException: If the API call to retrieve the package policy fails.
+    """
+    # pylint: disable=duplicate-code
+    url = f"{cfg.kibana_url}/api/fleet/package_policies/{policy_id}"
+
+    try:
+        response = perform_api_call(
+            method="GET",
+            url=url,
+            auth=cfg.auth,
+        )
+
+        return response.get("item", {})
+    except APICallException as api_ex:
+        logger.error(
+            f"API call failed, status code {api_ex.status_code}.
Response: {api_ex.response_text}", + ) + return {} diff --git a/deploy/test-environments/fleet_api/src/configuration_fleet.py b/deploy/test-environments/fleet_api/src/configuration_fleet.py index 5f66b6d80b..fff2249669 100644 --- a/deploy/test-environments/fleet_api/src/configuration_fleet.py +++ b/deploy/test-environments/fleet_api/src/configuration_fleet.py @@ -18,6 +18,14 @@ import os from munch import Munch +CNVM_TAGS = ( + "Key=division,Value=engineering " + "Key=org,Value=security " + "Key=team,Value=cloud-security-posture " + "Key=project,Value=test-environments" +) + + elk_config = Munch() elk_config.user = os.getenv("ES_USER", "NA") elk_config.password = os.getenv("ES_PASSWORD", "NA") @@ -31,6 +39,8 @@ aws_config = Munch() aws_config.access_key_id = os.getenv("AWS_ACCESS_KEY_ID", "NA") aws_config.secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY", "NA") +aws_config.cnvm_tags = os.getenv("AWS_CNVM_TAGS", CNVM_TAGS) +aws_config.cnvm_stack_name = os.getenv("CNVM_STACK_NAME", "NA") gcp_dm_config = Munch() gcp_dm_config.deployment_name = os.getenv("DEPLOYMENT_NAME", "") diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index 3b5cbc30e6..036afcaa22 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -7,9 +7,9 @@ 2. Create a CNVM AWS integration. 3. Create a deploy/cloudformation/config.json file to be used by the just deploy-cloudformation command. """ +import sys import json from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -18,50 +18,78 @@ get_enrollment_token, get_fleet_server_host, get_artifact_server, + get_package_version, + get_cnvm_template, ) from loguru import logger -from utils import read_json -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType +from package_policy import ( + version_compatible, + VERSION_MAP, + load_data, + generate_random_name, + get_package_default_url, + extract_template_url, +) +from utils import rename_file_by_suffix -CNVM_AGENT_POLICY = "../../../cloud/data/agent_policy_cnvm_aws.json" -CNVM_PACKAGE_POLICY = "../../../cloud/data/package_policy_cnvm_aws.json" CNVM_EXPECTED_AGENTS = 1 CNVM_CLOUDFORMATION_CONFIG = "../../../cloudformation/config.json" -CNVM_AGENT_TAGS = ["cft_version:CFT_VERSION", "cft_arn:arn:aws:cloudformation:.*"] - -cnvm_agent_policy_data = Path(__file__).parent / CNVM_AGENT_POLICY -cnvm_pkg_policy_data = Path(__file__).parent / CNVM_PACKAGE_POLICY +CNMV_TEMPLATE = "../../../cloudformation/elastic-agent-ec2-cnvm.yml" +CNMV_TEMP_FILE = "elastic-agent-ec2-cnvm-temp.yml" +CNVM_AGENT_TAGS = ["cft_version:*", "cft_arn:arn:aws:cloudformation:.*"] +PKG_DEFAULT_VERSION = VERSION_MAP.get("vuln_mgmt_aws", "") +INTEGRATION_NAME = "CNVM AWS" +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cnvm-aws"), + "input_name": "vuln_mgmt_aws", + "posture": "vuln_mgmt", + "deployment": "aws", +} +AGENT_INPUT = { + "name": generate_random_name("cnvm-aws"), +} cnvm_cloudformation_config = Path(__file__).parent / CNVM_CLOUDFORMATION_CONFIG - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. 
- """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cnvm_agent_policy_data) - package_policy = read_json(json_path=cnvm_pkg_policy_data) - return agent_policy, package_policy +cnvm_cloudformation_template = Path(__file__).parent / CNMV_TEMPLATE if __name__ == "__main__": # pylint: disable=duplicate-code - logger.info("Starting installation of CNVM AWS integration.") - agent_data, package_data = load_data() + package_version = get_package_version(cfg=cnfg.elk_config) + logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - logger.info("Create CNVM integration for policy", agent_policy_id) + logger.info(f"Create {INTEGRATION_NAME} integration for policy {agent_policy_id}") package_policy_id = create_cnvm_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, CNVM_EXPECTED_AGENTS, CNVM_AGENT_TAGS)) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + CNVM_EXPECTED_AGENTS, + CNVM_AGENT_TAGS, + HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], + ), + ) cloudformation_params = Munch() cloudformation_params.ENROLLMENT_TOKEN = get_enrollment_token( @@ -76,4 +104,24 @@ def load_data() -> Tuple[Dict, Dict]: with open(cnvm_cloudformation_config, "w") as file: json.dump(cloudformation_params, file) - logger.info("Installation of CNVM integration is done") + logger.info(f"Get {INTEGRATION_NAME} template") + default_url = get_package_default_url( + cfg=cnfg.elk_config, + policy_name=INTEGRATION_INPUT["posture"], + policy_type="cloudbeat/vuln_mgmt_aws", + ) + template_url = extract_template_url(url_string=default_url) + + logger.info(f"Using {template_url} for stack creation") + if template_url: + rename_file_by_suffix( + file_path=cnvm_cloudformation_template, + suffix="-orig", + ) + get_cnvm_template( + url=template_url, + template_path=cnvm_cloudformation_template, + cnvm_tags=cnfg.aws_config.cnvm_tags, + ) + + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py index 30d808367c..a5be411cf5 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py @@ -7,10 +7,11 @@ 2. Create a CSPM GCP integration. 3. Create a deploy/deployment-manager/config.json file to be used by the just deploy-dm command. 
""" +import sys import json from pathlib import Path -from typing import Dict, Tuple from munch import Munch +from packaging import version import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy from api.package_policy_api import create_cspm_integration @@ -22,43 +23,57 @@ update_package_version, ) from loguru import logger -from utils import read_json -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) -CSPM_GCP_AGENT_POLICY = "../../../cloud/data/agent_policy_cspm_gcp.json" -CSPM_GCP_PACKAGE_POLICY = "../../../cloud/data/package_policy_cspm_gcp.json" CSPM_GCP_EXPECTED_AGENTS = 1 DEPLOYMENT_MANAGER_CONFIG = "../../../deployment-manager/config.json" -cspm_gcp_agent_policy_data = Path(__file__).parent / CSPM_GCP_AGENT_POLICY -cspm_gcp_pkg_policy_data = Path(__file__).parent / CSPM_GCP_PACKAGE_POLICY cspm_gcp_deployment_manager_config = Path(__file__).parent / DEPLOYMENT_MANAGER_CONFIG INTEGRATION_NAME = "CSPM GCP" - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. - """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cspm_gcp_agent_policy_data) - package_policy = read_json(json_path=cspm_gcp_pkg_policy_data) - return agent_policy, package_policy +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_gcp", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cspm-gcp"), + "input_name": "cis_gcp", + "posture": "cspm", + "deployment": "gcp", +} +AGENT_INPUT = { + "name": generate_random_name("cspm-gcp"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + logger.info(f"Package version: {package_version}") update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) + if version.parse(package_version) >= version.parse("1.6"): + INTEGRATION_INPUT["vars"] = { + "gcp.account_type": "single-account", + } logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") - agent_data, package_data = load_data() + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) @@ -77,6 +92,8 @@ def load_data() -> Tuple[Dict, Dict]: package_policy_id, CSPM_GCP_EXPECTED_AGENTS, [], + HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], ), ) diff --git a/deploy/test-environments/fleet_api/src/install_cspm_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_integration.py index 70c7c2e919..4f8a29b3cc 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_integration.py @@ -7,9 +7,8 @@ 2. Create a CSPM AWS integration. 3. Create a CSPM bash script to be deployed on a host. 
""" - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -22,65 +21,86 @@ update_package_version, ) from loguru import logger -from utils import ( - read_json, - render_template, +from utils import render_template +from state_file_manager import state_manager, PolicyState, HostType +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + patch_vars, + VERSION_MAP, ) -from state_file_manager import state_manager, PolicyState -CSPM_AGENT_POLICY = "../../../cloud/data/agent_policy_cspm_aws.json" -CSPM_PACKAGE_POLICY = "../../../cloud/data/package_policy_cspm_aws.json" CSPM_EXPECTED_AGENTS = 1 +INTEGRATION_NAME = "CSPM AWS" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_aws", "") +aws_config = cnfg.aws_config +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cspm-aws"), + "input_name": "cis_aws", + "posture": "cspm", + "deployment": "cloudbeat/cis_aws", + "vars": { + "access_key_id": aws_config.access_key_id, + "secret_access_key": aws_config.secret_access_key, + "aws.credentials.type": "direct_access_keys", + }, +} +AGENT_INPUT = { + "name": generate_random_name("cspm-aws"), +} -cspm_agent_policy_data = Path(__file__).parent / CSPM_AGENT_POLICY -cspm_pkg_policy_data = Path(__file__).parent / CSPM_PACKAGE_POLICY cspm_template = Path(__file__).parent / "data/cspm-linux.j2" - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. - """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cspm_agent_policy_data) - package_policy = read_json(json_path=cspm_pkg_policy_data) - return agent_policy, package_policy - - if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of CSPM AWS integration.") - agent_data, package_data = load_data() + patch_vars( + var_dict=INTEGRATION_INPUT.get("vars", {}), + package_version=package_version, + ) + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - aws_config = cnfg.aws_config - cspm_data = { - "access_key_id": aws_config.access_key_id, - "secret_access_key": aws_config.secret_access_key, - "aws.credentials.type": "direct_access_keys", - } - - logger.info("Create CSPM integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_cspm_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, - cspm_data=cspm_data, + cspm_data={}, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, CSPM_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + CSPM_EXPECTED_AGENTS, + [], + 
HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = get_enrollment_token( @@ -96,9 +116,9 @@ def load_data() -> Tuple[Dict, Dict]: # Render the template and get the replaced content rendered_content = render_template(cspm_template, manifest_params.toDict()) - logger.info("Creating CSPM linux manifest") + logger.info(f"Creating {INTEGRATION_NAME} linux manifest") # Write the rendered content to a file with open(Path(__file__).parent / "cspm-linux.sh", "w", encoding="utf-8") as cspm_file: cspm_file.write(rendered_content) - logger.info("Installation of CSPM integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/install_d4c_integration.py b/deploy/test-environments/fleet_api/src/install_d4c_integration.py index e97c4186c7..d4c62d6248 100755 --- a/deploy/test-environments/fleet_api/src/install_d4c_integration.py +++ b/deploy/test-environments/fleet_api/src/install_d4c_integration.py @@ -23,10 +23,11 @@ ) from loguru import logger from utils import read_json -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType D4C_AGENT_POLICY = "../../../cloud/data/agent_policy_d4c.json" D4C_PACKAGE_POLICY = "../../../cloud/data/package_policy_d4c.json" +D4C_AGENT_POLICY_NAME = "tf-ap-d4c" D4C_EXPECTED_AGENTS = 2 INTEGRATAION_NAME = "D4C" @@ -78,6 +79,8 @@ def load_data() -> Tuple[Dict, Dict]: package_policy_id, D4C_EXPECTED_AGENTS, [], + HostType.KUBERNETES.value, + D4C_AGENT_POLICY_NAME, ), ) diff --git a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py index 830cc5c6c9..7e20223307 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py @@ -7,9 +7,8 @@ 2. Create a KSPM EKS integration. 3. Create a KSPM manifest to be deployed on a host. """ - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy, get_agent_policy_id_by_name @@ -22,42 +21,57 @@ update_package_version, ) from loguru import logger -from utils import read_json -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) + -KSPM_EKS_AGENT_POLICY = "../../../cloud/data/agent_policy_eks.json" -KSPM_EKS_PACKAGE_POLICY = "../../../cloud/data/package_policy_eks.json" KSPM_EKS_EXPECTED_AGENTS = 2 D4C_AGENT_POLICY_NAME = "tf-ap-d4c" - -kspm_agent_policy_data = Path(__file__).parent / KSPM_EKS_AGENT_POLICY -kspm_eks_pkg_policy_data = Path(__file__).parent / KSPM_EKS_PACKAGE_POLICY - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. 
- """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=kspm_agent_policy_data) - package_policy = read_json(json_path=kspm_eks_pkg_policy_data) - return agent_policy, package_policy - +INTEGRATION_NAME = "KSPM EKS" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_eks", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-kspm-eks"), + "input_name": "cis_eks", + "posture": "kspm", + "deployment": "cloudbeat/cis_eks", + "vars": { + "access_key_id": cnfg.aws_config.access_key_id, + "secret_access_key": cnfg.aws_config.secret_access_key, + "aws.credentials.type": "direct_access_keys", + }, +} +AGENT_INPUT = { + "name": generate_random_name("kspm-eks"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of KSPM EKS integration.") - agent_data, package_data = load_data() + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = get_agent_policy_id_by_name( @@ -70,21 +84,24 @@ def load_data() -> Tuple[Dict, Dict]: json_policy=agent_data, ) - aws_config = cnfg.aws_config - eks_data = { - "access_key_id": aws_config.access_key_id, - "secret_access_key": aws_config.secret_access_key, - } - - logger.info("Create KSPM EKS integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_kspm_eks_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, - eks_data=eks_data, + eks_data={}, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, KSPM_EKS_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + KSPM_EKS_EXPECTED_AGENTS, + [], + HostType.KUBERNETES.value, + INTEGRATION_INPUT["name"], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = get_enrollment_token( @@ -95,6 +112,6 @@ def load_data() -> Tuple[Dict, Dict]: manifest_params.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config) manifest_params.yaml_path = Path(__file__).parent / "kspm_eks.yaml" manifest_params.docker_image_override = cnfg.kspm_config.docker_image_override - logger.info("Creating KSPM EKS manifest") + logger.info(f"Creating {INTEGRATION_NAME} manifest") create_kubernetes_manifest(cfg=cnfg.elk_config, params=manifest_params) - logger.info("Installation of KSPM EKS integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py index 730ceb4d7c..b5bbf94f04 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py @@ -7,9 +7,8 @@ 2. Create a KSPM unmanaged integration. 3. Create a KSPM manifest to be deployed on a host. 
""" - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -22,55 +21,73 @@ update_package_version, ) from loguru import logger -from utils import read_json -from state_file_manager import state_manager, PolicyState - -KSPM_UNMANAGED_AGENT_POLICY = "../../../cloud/data/agent_policy_vanilla.json" -KSPM_UNMANAGED_PACKAGE_POLICY = "../../../cloud/data/package_policy_vanilla.json" -KSPM_UNMANAGED_EXPECTED_AGENTS = 2 - - -kspm_agent_policy_data = Path(__file__).parent / KSPM_UNMANAGED_AGENT_POLICY -kspm_unmanached_pkg_policy_data = Path(__file__).parent / KSPM_UNMANAGED_PACKAGE_POLICY - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. +from state_file_manager import state_manager, PolicyState, HostType +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. - """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=kspm_agent_policy_data) - package_policy = read_json(json_path=kspm_unmanached_pkg_policy_data) - return agent_policy, package_policy +KSPM_UNMANAGED_EXPECTED_AGENTS = 2 +INTEGRATION_NAME = "KSPM Self Managed" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_k8s", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-kspm"), + "input_name": "cis_k8s", + "posture": "kspm", + "deployment": "self_managed", +} +AGENT_INPUT = { + "name": generate_random_name("kspm-self-managed"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of KSPM integration.") - agent_data, package_data = load_data() + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - logger.info("Create KSPM unmanaged integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_kspm_unmanaged_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, KSPM_UNMANAGED_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + KSPM_UNMANAGED_EXPECTED_AGENTS, + [], + HostType.KUBERNETES.value, + INTEGRATION_INPUT["name"], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = get_enrollment_token( @@ -81,6 +98,6 @@ def load_data() -> Tuple[Dict, Dict]: manifest_params.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config) manifest_params.yaml_path = Path(__file__).parent / "kspm_unmanaged.yaml" manifest_params.docker_image_override = cnfg.kspm_config.docker_image_override - logger.info("Creating KSPM unmanaged manifest") + logger.info(f"Creating {INTEGRATION_NAME} 
manifest") create_kubernetes_manifest(cfg=cnfg.elk_config, params=manifest_params) - logger.info("Installation of KSPM integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} is done") diff --git a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py new file mode 100644 index 0000000000..806673a722 --- /dev/null +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -0,0 +1,307 @@ +""" +This module provides functions for generating and formatting policy templates +and inputs based on provided data and templates. +""" + +import copy +import uuid +from typing import Dict, Tuple +from urllib.parse import urlparse, parse_qs +from packaging import version +from munch import Munch +from loguru import logger +from api.common_api import get_package + +# Constants +CLOUD_SECURITY_POSTURE = "cloud_security_posture" +REQUIRE_VARS = ["cloudbeat/cis_aws", "cloudbeat/cis_eks"] +SIMPLIFIED_PACKAGE_POLICY = { + "policy_id": "", + "package": {}, + "name": "", + "description": "", + "namespace": "default", + "inputs": {}, + "vars": {}, +} +SIMPLIFIED_AGENT_POLICY = { + "name": "", + "namespace": "default", + "monitoring_enabled": ["logs", "metrics"], +} +VERSION_MAP = { + "cis_k8s": "1.1.0", + "cis_eks": "1.2.0", + "cis_aws": "1.2.0", + "vuln_mgmt_aws": "1.3.0", + "cis_gcp": "1.5.0", +} + + +def generate_input_id(name: str, input_type: str) -> str: + """ + Generates a unique input ID based on the provided name and input type. + + This function combines the 'name' and 'input_type' parameters to create a unique + identifier for an input. The resulting ID is useful for organizing and referencing + inputs in a structured manner. + + Args: + name (str): The name or identifier associated with the input. + input_type (str): The type or category of the input. + + Returns: + str: A unique input ID generated by combining 'name' and 'input_type'. + + Example: + If 'name' is "cspm" and 'input_type' is "cloudbeat/cis_gcp", calling + 'generate_input_id(name, input_type)' will return "cspm-cloudbeat/cis_gcp". + """ + return f"{name}-{input_type}" + + +def format_inputs(policy_templates: list) -> dict: + """ + Format inputs based on policy templates. + + Args: + policy_templates (list): List of policy templates. + + Returns: + dict: Formatted inputs. + """ + inputs_dict = {} + for template in policy_templates: + name = template.get("name", "") + data_stream = template.get("data_streams", [])[0] + for template_input in template.get("inputs", []): + input_type = template_input.get("type", "") + input_dict = { + "enabled": False, + "streams": { + f"{CLOUD_SECURITY_POSTURE}.{data_stream}": { + "enabled": False, + }, + }, + } + # Conditionally add "vars" based on input_type + if input_type in REQUIRE_VARS: + input_dict["streams"][f"{CLOUD_SECURITY_POSTURE}.{data_stream}"]["vars"] = {} + inputs_dict[generate_input_id(name=name, input_type=input_type)] = input_dict + return inputs_dict + + +def format_vars(package_vars: list) -> dict: + """ + Format vars based on package vars. + + Args: + package_vars (list): List of package vars. + + Returns: + dict: Formatted vars. + """ + vars_dict = {} + for package_var in package_vars: + vars_dict[package_var.get("name", "")] = "" + return vars_dict + + +def update_input(data, input_data): + """ + Recursively updates a dictionary structure with values from another dictionary. + + Args: + data (dict or list): The dictionary structure to be updated. 
+        input_data (dict): The dictionary containing values to update 'data' with.
+    """
+    if isinstance(data, dict):
+        for key, value in data.items():
+            if key == "enabled":
+                data[key] = True
+            elif key == "vars" and isinstance(value, dict):
+                data[key] = input_data.get("vars", {})
+            elif isinstance(value, (dict, list)):
+                update_input(value, input_data)
+    elif isinstance(data, list):
+        for item in data:
+            update_input(item, input_data)
+
+
+def generate_policy_template(cfg: Munch, policy_template: dict = None) -> dict:
+    """
+    Generate a policy template based on configuration and a template.
+
+    Args:
+        cfg (Munch): Configuration data.
+        policy_template (dict, optional): Policy template. If not provided,
+            a default template will be used.
+
+    Returns:
+        dict: Generated policy template.
+    """
+    if policy_template is None:
+        policy_template = SIMPLIFIED_PACKAGE_POLICY
+
+    generated_policy = copy.deepcopy(policy_template)
+    package_policy_info = get_package(cfg=cfg)
+    generated_policy["package"] = {
+        "name": package_policy_info.get("name", ""),
+        "version": package_policy_info.get("version", ""),
+    }
+    generated_policy["inputs"] = format_inputs(package_policy_info.get("policy_templates", []))
+    generated_policy["vars"] = format_vars(package_vars=package_policy_info.get("vars", []))
+    return generated_policy
+
+
+def generate_package_policy(template: dict, policy_input: dict) -> dict:
+    """
+    Generate a package policy based on a template and policy input.
+
+    Args:
+        template (dict): The package policy template.
+        policy_input (dict): The policy input containing values to update.
+
+    Returns:
+        dict: The generated package policy.
+    """
+    package_policy = copy.deepcopy(template)
+    integration_key = policy_input.get("input_name", "")
+    for input_name, data in package_policy["inputs"].items():
+        if integration_key in input_name:
+            update_input(data, policy_input)
+            if "vars" in policy_input and "vars" not in data["streams"]["cloud_security_posture.findings"]:
+                data["streams"]["cloud_security_posture.findings"]["vars"] = policy_input["vars"]
+    package_policy["vars"]["posture"] = policy_input.get("posture", "")
+    package_policy["vars"]["deployment"] = policy_input.get("deployment", "")
+    package_policy["name"] = policy_input.get("name", "")
+    return package_policy
+
+
+def load_data(cfg: Munch, agent_input: dict, package_input: dict) -> Tuple[Dict, Dict]:
+    """
+    Load agent and package policies based on input data.
+
+    Args:
+        cfg (Munch): Configuration data.
+        agent_input (dict): Agent policy input data.
+        package_input (dict): Package policy input data.
+
+    Returns:
+        Tuple[Dict, Dict]: A tuple containing the loaded agent policy and package policy.
+    """
+    logger.info("Loading agent and package policies")
+    agent_policy = SIMPLIFIED_AGENT_POLICY
+    agent_policy["name"] = agent_input.get("name", "")
+    package_template = generate_policy_template(cfg=cfg)
+    package_policy = generate_package_policy(
+        template=package_template,
+        policy_input=package_input,
+    )
+
+    return agent_policy, package_policy
+
+
+def version_compatible(current_version, required_version):
+    """
+    Check if the current version is compatible with the required version.
+
+    Args:
+        current_version (str): The current version to be checked.
+        required_version (str): The required version for compatibility.
+
+    Returns:
+        bool: True if the current version is compatible, False otherwise.
+
+
+def load_data(cfg: Munch, agent_input: dict, package_input: dict) -> Tuple[Dict, Dict]:
+    """
+    Load agent and package policies based on input data.
+
+    Args:
+        cfg (Munch): Configuration data.
+        agent_input (dict): Agent policy input data.
+        package_input (dict): Package policy input data.
+
+    Returns:
+        Tuple[Dict, Dict]: A tuple containing the loaded agent policy and package policy.
+    """
+    logger.info("Loading agent and package policies")
+    agent_policy = copy.deepcopy(SIMPLIFIED_AGENT_POLICY)  # copy so the shared template is not mutated
+    agent_policy["name"] = agent_input.get("name", "")
+    package_template = generate_policy_template(cfg=cfg)
+    package_policy = generate_package_policy(
+        template=package_template,
+        policy_input=package_input,
+    )
+
+    return agent_policy, package_policy
+
+
+def version_compatible(current_version, required_version):
+    """
+    Check if the current version is compatible with the required version.
+
+    Args:
+        current_version (str): The current version to be checked.
+        required_version (str): The required version for compatibility.
+
+    Returns:
+        bool: True if the current version is compatible, False otherwise.
+    """
+    return version.parse(current_version) >= version.parse(required_version)
+
+
+def generate_random_name(prefix: str) -> str:
+    """
+    Generate a random name by combining a given prefix with a random suffix.
+
+    Args:
+        prefix (str): The prefix to be combined with the random suffix.
+
+    Returns:
+        str: The generated name consisting of the prefix and a random 6-character suffix.
+    """
+    random_uuid = str(uuid.uuid4())
+    # Extract the last 6 characters from the UUID
+    random_suffix = random_uuid[-6:]
+    generated_name = f"{prefix}-{random_suffix}"
+
+    return generated_name
+
+
+def patch_vars(var_dict, package_version):
+    """
+    Conditionally updates a dictionary based on the package version.
+
+    This function checks the provided package version and updates the given
+    dictionary 'var_dict' with additional fields based on version requirements.
+
+    Args:
+        var_dict (dict): The dictionary to be updated.
+        package_version (str): The version of the package to determine updates.
+
+    Returns:
+        None: This function modifies 'var_dict' in place.
+    """
+    if version.parse(package_version) >= version.parse("1.5.0"):
+        # Add or update fields in the vars_dict based on the version requirements
+        var_dict["aws.account_type"] = "single-account"
+
+
+def get_package_default_url(cfg: Munch, policy_name: str, policy_type: str) -> str:
+    """
+    Get the package default URL for a specific policy and policy type from the configuration.
+
+    Args:
+        cfg (Munch): The configuration containing policy information.
+        policy_name (str): The name of the policy.
+        policy_type (str): The type of the policy.
+
+    Returns:
+        str: The default package URL for the specified policy and type.
+             An empty string is returned if not found.
+    """
+    package_policy = get_package(cfg=cfg)
+    policy_templates = package_policy.get("policy_templates", [])
+
+    for template in policy_templates:
+        if template.get("name", "") == policy_name:
+            inputs = template.get("inputs", [])
+
+            for policy_input in inputs:
+                if policy_input.get("type", "") == policy_type:
+                    vars_list = policy_input.get("vars", [])
+
+                    if vars_list:
+                        return vars_list[0].get("default", "")
+
+    return ""
+
+
+def extract_template_url(url_string: str) -> str:
+    """
+    Extracts the 'templateURL' parameter from a given URL string.
+
+    Args:
+        url_string (str): The URL string from which to extract the 'templateURL' parameter.
+
+    Returns:
+        str: The value of the 'templateURL' parameter if found in the URL,
+             or an empty string if the parameter is not present.
+    """
+    parsed_url = urlparse(url_string, allow_fragments=False)
+    query_parameters = parse_qs(parsed_url.query)
+
+    template_url = query_parameters.get("templateURL")
+
+    if not template_url:
+        logger.warning("templateURL parameter was not found")
+        return ""
+
+    return template_url[0]
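For illustration, a simplified CloudFormation quick-create URL shows what `extract_template_url` pulls out. Real Kibana defaults usually URL-encode the `templateURL` value, which `parse_qs` decodes transparently; this example skips the encoding for readability:

```python
# allow_fragments=False keeps the "#/stacks/quickcreate" part in the path,
# so the query string after "?" is still parsed normally.
quick_create_url = (
    "https://console.aws.amazon.com/cloudformation/home#/stacks/quickcreate"
    "?templateURL=https://example-bucket.s3.amazonaws.com/cnvm-template.yml"
    "&stackName=cnvm"
)
assert extract_template_url(quick_create_url) == (
    "https://example-bucket.s3.amazonaws.com/cnvm-template.yml"
)
```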
diff --git a/deploy/test-environments/fleet_api/src/state_file_manager.py b/deploy/test-environments/fleet_api/src/state_file_manager.py
index a67f8f3b16..1a8ab6b351 100644
--- a/deploy/test-environments/fleet_api/src/state_file_manager.py
+++ b/deploy/test-environments/fleet_api/src/state_file_manager.py
@@ -4,12 +4,29 @@
 """
 import json
 from pathlib import Path
+from enum import Enum
 from utils import delete_file
 from loguru import logger
 
 __state_file = Path(__file__).parent / "state_data.json"
 
 
+class HostType(Enum):
+    """
+    Enumeration representing different host types for deployment.
+
+    The `HostType` enumeration defines constants for various host types,
+    such as Kubernetes or Linux-based deployments.
+
+    Attributes:
+        KUBERNETES (str): Represents a Kubernetes-based deployment.
+        LINUX_TAR (str): Represents a Linux-based deployment using TAR archives.
+    """
+
+    KUBERNETES = "kubernetes"
+    LINUX_TAR = "linux"
+
+
 class PolicyStateEncoder(json.JSONEncoder):
     """
     Custom JSON encoder for PolicyState objects.
@@ -27,17 +44,30 @@ class PolicyState:
     Class to represent a policy state.
     """
 
-    def __init__(self, agnt_policy_id: str, pkg_policy_id: str, expected_agents: int, expected_tags: list[str]):
+    def __init__(
+        self,
+        agnt_policy_id: str,
+        pkg_policy_id: str,
+        expected_agents: int,
+        expected_tags: list[str],
+        host_type: HostType,
+        integration_name: str,
+    ):
         """
         Args:
             agnt_policy_id (str): ID of the agent policy.
             pkg_policy_id (str): ID of the package policy.
             expected_agents (int): Expected number of deployed agents.
+            expected_tags (list[str]): List of expected agent tags.
+            host_type (HostType): Deployment host type.
+            integration_name (str): Name of the installed integration.
         """
         self.agnt_policy_id = agnt_policy_id
         self.pkg_policy_id = pkg_policy_id
         self.expected_agents = expected_agents
         self.expected_tags = expected_tags
+        self.host_type = host_type
+        self.integration_name = integration_name
 
 
 class StateFileManager:
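A sketch of a state entry with the new fields, reusing the example IDs from the `upgrade_agents.py` docstring below. Note that the consumers compare `host_type` against `HostType.LINUX_TAR.value`, i.e. the plain string that survives the JSON round trip, so the string form is stored here:

```python
# Illustrative only; IDs come from the state_data.json example below.
state = PolicyState(
    agnt_policy_id="c3a6d9d0-6b58-11ee-8fd8-b709d88b5892",
    pkg_policy_id="226965a4-e07a-4ddd-a64d-765ddd9946e5",
    expected_agents=1,
    expected_tags=["cft_version:cft_version"],
    host_type=HostType.LINUX_TAR.value,  # stored as "linux"
    integration_name="cnvm-int",
)
```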
diff --git a/deploy/test-environments/fleet_api/src/upgrade_agents.py b/deploy/test-environments/fleet_api/src/upgrade_agents.py
new file mode 100755
index 0000000000..2f17d0598f
--- /dev/null
+++ b/deploy/test-environments/fleet_api/src/upgrade_agents.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+"""
+This script upgrades Linux-based agents.
+
+The following steps are performed:
+1. Generate a custom agent binary download URL.
+2. Update all Linux-based agent policies with the custom download URL.
+3. Execute a bulk upgrade process for all agents.
+4. Wait until all agent upgrades are complete.
+
+Note: This script requires a 'state_data.json' file to identify all Linux agents to be updated.
+For execution, make sure this file exists in the same directory.
+
+Example 'state_data.json':
+{
+    "policies": [
+        {
+            "agnt_policy_id": "c3a6d9d0-6b58-11ee-8fd8-b709d88b5892",
+            "pkg_policy_id": "226965a4-e07a-4ddd-a64d-765ddd9946e5",
+            "expected_agents": 1,
+            "expected_tags": [
+                "cft_version:cft_version",
+                "cft_arn:arn:aws:cloudformation:.*"
+            ],
+            "host_type": "linux",
+            "integration_name": "cnvm-int"
+        }
+    ]
+}
+"""
+
+import sys
+import time
+from pathlib import Path
+from loguru import logger
+import configuration_fleet as cnfg
+from api.agent_policy_api import (
+    create_agent_download_source,
+    get_agents,
+    update_agent_policy,
+)
+from api.common_api import (
+    get_artifact_server,
+    bulk_upgrade_agents,
+    wait_for_action_status,
+    get_package_version,
+)
+from api.package_policy_api import get_package_policy_by_id
+from state_file_manager import state_manager, HostType
+
+STATE_DATA_PATH = Path(__file__).parent / "state_data.json"
+
+
+def create_custom_agent_download_source() -> str:
+    """Create a custom agent download source and return its ID."""
+    host_url = get_artifact_server(version=cnfg.elk_config.stack_version, is_short_url=True)
+    download_source_id = create_agent_download_source(
+        cfg=cnfg.elk_config,
+        name="custom_source",
+        host=host_url,
+    )
+    logger.info(f"Download source id '{download_source_id}' is created")
+    return download_source_id
+
+
+def update_linux_policies(download_source_id: str):
+    """Update all Linux-based agent policies with the custom download source."""
+    state_policies = state_manager.get_policies()
+    linux_policies_list = []
+
+    for policy in state_policies:
+        if policy.host_type == HostType.LINUX_TAR.value:
+            linux_policies_list.append(policy.agnt_policy_id)
+            update_agent_policy(
+                cfg=cnfg.elk_config,
+                policy_id=policy.agnt_policy_id,
+                json_policy={
+                    "name": policy.integration_name,
+                    "namespace": "default",
+                    "download_source_id": download_source_id,
+                },
+            )
+
+    return linux_policies_list
+
+
+def wait_for_packages_upgrade():
+    """
+    Wait until every package policy is upgraded to the desired version.
+    """
+    desired_version = get_package_version(cfg=cnfg.elk_config)
+    policies = state_manager.get_policies()
+    for policy in policies:
+        if policy.integration_name == "tf-ap-d4c":
+            continue
+        if not wait_for_package_policy_version(
+            cfg=cnfg.elk_config,
+            policy_id=policy.pkg_policy_id,
+            desired_version=desired_version,
+        ):
+            logger.error(f"Integration {policy.integration_name} failed to upgrade.")
+            sys.exit(1)
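The state file drives both halves of the upgrade: the same entries select which agent policies receive the custom download source here, and which enrolled agents get upgraded later. A minimal sketch of that filter:

```python
# Only entries recorded with the Linux TAR host type take part in the upgrade.
linux_policy_ids = [
    policy.agnt_policy_id
    for policy in state_manager.get_policies()
    if policy.host_type == HostType.LINUX_TAR.value
]
```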
+
+
+def wait_for_package_policy_version(
+    cfg,
+    policy_id,
+    desired_version,
+    timeout_secs=300,
+    poll_interval_secs=10,
+):
+    """
+    Wait for a package policy to reach the desired version with a timeout.
+
+    Args:
+        cfg (Munch): A configuration object containing Kibana URL, authentication details, etc.
+        policy_id (str): The package policy ID to monitor.
+        desired_version (str): The desired version to wait for.
+        timeout_secs (int, optional): Maximum time to wait in seconds. Default is 300 seconds.
+        poll_interval_secs (int, optional): Time to wait between polling for the package version.
+            Default is 10 seconds.
+
+    Returns:
+        bool: True if the package policy reaches the desired version within the timeout,
+        False otherwise.
+    """
+    start_time = time.time()
+
+    while time.time() - start_time < timeout_secs:
+        policy_info = get_package_policy_by_id(cfg, policy_id)
+        policy_name = policy_info.get("name", "")
+        policy_version = policy_info.get("package", {}).get("version", "")
+        logger.info(
+            f"Integration: {policy_name}, current version: {policy_version}, desired version: {desired_version}",
+        )
+        if policy_version == desired_version:
+            return True  # Desired version reached
+
+        time.sleep(poll_interval_secs)  # Wait and poll again
+
+    return False  # Desired version not reached within the timeout
+
+
+def main():
+    """
+    Main Linux agent upgrade flow.
+    """
+    # Ensure that all packages are on the latest version
+    wait_for_packages_upgrade()
+
+    download_source_id = create_custom_agent_download_source()
+
+    if not download_source_id:
+        logger.error("Failed to create the agent download source.")
+        sys.exit(1)
+
+    linux_policies_list = update_linux_policies(download_source_id)
+    time.sleep(180)  # Give Fleet time to apply the updated policies
+    agents = get_agents(cfg=cnfg.elk_config)
+    linux_agent_ids = [agent.id for agent in agents if agent.policy_id in linux_policies_list]
+    for agent_id in linux_agent_ids:
+        action_id = bulk_upgrade_agents(
+            cfg=cnfg.elk_config,
+            agent_ids=agent_id,
+            version=cnfg.elk_config.stack_version,
+            source_uri=get_artifact_server(version=cnfg.elk_config.stack_version),
+        )
+
+        if not wait_for_action_status(
+            cfg=cnfg.elk_config,
+            target_action_id=action_id,
+            target_type="UPGRADE",
+            target_status="COMPLETE",
+        ):
+            sys.exit(1)
+        logger.info(f"Agent {agent_id} upgrade is finished")
+
+
+if __name__ == "__main__":
+    main()
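Usage sketch for the polling helper above; the ID and target version are illustrative. With the defaults, it polls every 10 seconds for up to 300 seconds, i.e. at most 30 checks per policy:

```python
# Illustrative IDs/version; requires a live Kibana reachable via cnfg.elk_config.
upgraded = wait_for_package_policy_version(
    cfg=cnfg.elk_config,
    policy_id="226965a4-e07a-4ddd-a64d-765ddd9946e5",  # example ID from state_data.json
    desired_version="1.6.0",  # hypothetical target package version
)
if not upgraded:
    logger.error("Package policy did not reach the desired version in time")
```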
diff --git a/deploy/test-environments/fleet_api/src/upgrade_cnvm.py b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py
new file mode 100755
index 0000000000..56d4f7d4ca
--- /dev/null
+++ b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+"""
+This script updates the AWS CNVM agent.
+
+The following steps are performed:
+1. Download the latest CNVM template.
+2. Get all the required parameters.
+3. Execute a CloudFormation stack update.
+
+Note: This script requires the configuration and dependencies provided by the 'cnfg' and 'utils' modules.
+
+For execution, you can create a configuration file 'cnvm_config.json' in the same directory.
+
+Example 'cnvm_config.json':
+{
+    "ENROLLMENT_TOKEN": "YourEnrollmentToken"
+}
+
+Ensure that AWS credentials are properly configured for Boto3.
+
+You can also modify the 'stack_tags' variable to set custom tags for the CloudFormation stack.
+
+"""
+from pathlib import Path
+import boto3
+from munch import Munch
+from loguru import logger
+from utils import read_json
+import configuration_fleet as cnfg
+from api.common_api import (
+    get_artifact_server,
+    get_fleet_server_host,
+)
+from package_policy import (
+    get_package_default_url,
+    extract_template_url,
+)
+
+
+CNVM_JSON_PATH = Path(__file__).parent / "cnvm_config.json"
+
+
+def update_cloudformation_stack(cfg: Munch):
+    """
+    Update an AWS CloudFormation stack with the provided configuration.
+
+    Args:
+        cfg (Munch): A configuration object containing the following attributes:
+            - stack_name (str): The name of the CloudFormation stack to update.
+            - template (str): The URL or S3 path to the CloudFormation template.
+            - elastic_agent_version (str): The Elastic Agent version to set as a parameter.
+            - elastic_artifact_server (str): The Elastic Artifact Server URL to set as a parameter.
+            - enrollment_token (str): The Enrollment Token to set as a parameter.
+            - fleet_url (str): The Fleet URL to set as a parameter.
+            - stack_tags (list of dict): Tags to apply to the CloudFormation stack.
+
+    Returns:
+        None
+
+    The function performs a CloudFormation stack update using the provided configuration.
+    It initiates the stack update, waits for the update to complete, and logs the status.
+    """
+    # Create a Boto3 CloudFormation client
+    cf_client = boto3.client("cloudformation")
+
+    # Parameters in the format ParameterKey=Key,ParameterValue=Value
+    parameters = [
+        {"ParameterKey": "ElasticAgentVersion", "ParameterValue": cfg.elastic_agent_version},
+        {"ParameterKey": "ElasticArtifactServer", "ParameterValue": cfg.elastic_artifact_server},
+        {"ParameterKey": "EnrollmentToken", "ParameterValue": cfg.enrollment_token},
+        {"ParameterKey": "FleetUrl", "ParameterValue": cfg.fleet_url},
+    ]
+
+    # Capabilities
+    capabilities = ["CAPABILITY_NAMED_IAM"]
+
+    # Perform the stack update with the YAML template body
+    response = cf_client.update_stack(
+        StackName=cfg.stack_name,
+        TemplateURL=cfg.template,
+        Parameters=parameters,
+        Capabilities=capabilities,
+        Tags=cfg.stack_tags,
+    )
+    logger.info(f"Stack {response.get('StackId', 'NA')} update initiated. Waiting for update to complete...")
+
+    # Wait until the stack update is complete
+    cf_client.get_waiter("stack_update_complete").wait(StackName=cfg.stack_name)
+
+    logger.info(f"Stack {cfg.stack_name} update is complete.")
+
+
+if __name__ == "__main__":
+    config = Munch()
+    config.stack_name = cnfg.aws_config.cnvm_stack_name
+    # Get template
+    logger.info("Get AWS CNVM template")
+    default_url = get_package_default_url(
+        cfg=cnfg.elk_config,
+        policy_name="vuln_mgmt",
+        policy_type="cloudbeat/vuln_mgmt_aws",
+    )
+    template_url = extract_template_url(url_string=default_url)
+
+    config.template = template_url
+    config.elastic_agent_version = cnfg.elk_config.stack_version
+    config.elastic_artifact_server = get_artifact_server(cnfg.elk_config.stack_version)
+
+    # Tags for the CloudFormation stack
+    stack_tags = [
+        {"Key": "division", "Value": "engineering"},
+        {"Key": "org", "Value": "security"},
+        {"Key": "team", "Value": "cloud-security-posture"},
+        {"Key": "project", "Value": "test-environments"},
+    ]
+    config.stack_tags = stack_tags
+
+    # Get enrollment token
+    cnvm_json = read_json(CNVM_JSON_PATH)
+    config.enrollment_token = cnvm_json.get("ENROLLMENT_TOKEN", "")
+    config.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config)
+    update_cloudformation_stack(cfg=config)
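One hardening worth considering (not part of this diff): boto3's `update_stack` raises a `ClientError` whose message contains "No updates are to be performed." when the stack is already current, and a caller may prefer to treat that as success rather than a failure:

```python
# Sketch only, not in the diff: treat "no changes" as success instead of crashing.
from botocore.exceptions import ClientError

try:
    update_cloudformation_stack(cfg=config)
except ClientError as err:
    if "No updates are to be performed" not in str(err):
        raise
    logger.info("Stack is already up to date; nothing to do")
```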
diff --git a/deploy/test-environments/fleet_api/src/utils.py b/deploy/test-environments/fleet_api/src/utils.py
index db6ae30b05..387598fce1 100644
--- a/deploy/test-environments/fleet_api/src/utils.py
+++ b/deploy/test-environments/fleet_api/src/utils.py
@@ -241,3 +241,80 @@ def add_capabilities(yaml_content: str) -> str:
     modified_content = output_stream.getvalue()
 
     return modified_content
+
+
+def rename_file_by_suffix(file_path: Path, suffix: str) -> None:
+    """
+    Rename a file by adding a specified suffix to its filename.
+
+    Args:
+        file_path (Path): The path to the file to be renamed.
+        suffix (str): The suffix to be added to the filename.
+
+    Returns:
+        None
+    """
+    if not file_path.exists():
+        logger.warning(f"File {file_path.name} not found")
+        return
+
+    try:
+        new_name = f"{file_path.stem}{suffix}{file_path.suffix}"
+        new_file_path = file_path.parent / new_name
+        file_path.rename(new_file_path)
+    except FileNotFoundError:
+        logger.warning(f"File {file_path.name} not found")
+    except FileExistsError:
+        logger.warning(f"File {new_file_path} already exists")
+
+
+def add_tags(tags: str, yaml_content: str):
+    """
+    Add custom tags to a YAML content while preserving formatting.
+
+    Args:
+        tags (str): Custom tags as space-separated groups of comma-separated
+            key=value pairs, e.g. "key1=value1,key2=value2 key3=value3".
+        yaml_content (str): YAML content to which custom tags will be added.
+
+    Returns:
+        str: The modified YAML content with custom tags.
+    """
+    # Create a ruamel.yaml instance with the ability to preserve formatting
+    yaml = ruamel.yaml.YAML()
+    yaml.preserve_quotes = True
+    yaml.explicit_start = True
+    yaml.indent(mapping=2, sequence=4, offset=2)
+
+    cnvm_template = yaml.load(yaml_content)
+
+    # Get custom tags from the input argument
+    custom_tags = tags.split()
+    tag_dicts = []
+
+    for tag in custom_tags:
+        key_values = tag.split(",")
+        tag_dict = {}
+
+        for key_value in key_values:
+            key, value = key_value.split("=")
+            tag_dict[key] = value
+        tag_dicts.append(tag_dict)
+
+    for resource in cnvm_template["Resources"].values():
+        if resource["Type"] == "AWS::EC2::Instance":
+            if "Properties" not in resource:
+                resource["Properties"] = {}
+            if "Tags" not in resource["Properties"]:
+                resource["Properties"]["Tags"] = []
+            resource["Properties"]["Tags"] += tag_dicts
+
+    # Create an output stream
+    output_stream = ruamel.yaml.compat.StringIO()
+
+    # Dump the modified YAML data to the output stream
+    yaml.dump(cnvm_template, output_stream)
+
+    # Get the YAML string from the output stream
+    modified_content = output_stream.getvalue()
+
+    return modified_content
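Usage sketch for `add_tags`: tag groups are separated by spaces, and key=value pairs within a group by commas, so each group becomes one tag dict on the EC2 resource:

```python
# Minimal CNVM-style template; the Resources walk above only touches
# AWS::EC2::Instance entries.
template_yaml = """
Resources:
  Ec2Instance:
    Type: AWS::EC2::Instance
    Properties: {}
"""
tagged = add_tags("division=engineering,org=security team=cloud-security-posture", template_yaml)
# The EC2 resource now carries:
#   Tags:
#     - {division: engineering, org: security}
#     - {team: cloud-security-posture}
```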
diff --git a/dev-docs/Cloud-Env-Upgrade.md b/dev-docs/Cloud-Env-Upgrade.md
new file mode 100644
index 0000000000..59d211dc78
--- /dev/null
+++ b/dev-docs/Cloud-Env-Upgrade.md
@@ -0,0 +1,54 @@
+# Cloud Environment Upgrade Testing
+
+The [`Test Upgrade Environment`](https://github.com/elastic/cloudbeat/actions/workflows/upgrade-environment.yml) GitHub action automates the deployment of a fully featured cloud environment, pre-configured with all integrations (KSPM, CSPM, and D4C).
+It then upgrades the environment to a new version of the ELK stack and all installed agents, and verifies findings retrieval after the upgrade.
+
+
+## How to Run the Workflow
+
+Follow these steps to run the workflow:
+
+1. Go to [`Actions > Test Upgrade Environment`](https://github.com/elastic/cloudbeat/actions/workflows/upgrade-environment.yml).
+
+   ![Navigate to Actions](https://github.com/elastic/cloudbeat/assets/99176494/2686668f-7be6-4b55-a37b-e37426c1a0e1)
+
+2. Click the `Run workflow` button.
+
+   ![Run Workflow](https://github.com/elastic/cloudbeat/assets/99176494/902efe40-ed1b-4175-92a6-504439eb9e3d)
+
+3. Complete the required parameters:
+
+   - **`deployment_name`**: Name your environment (allowed characters: a-z0-9 and `-`). For
+     instance: `john-8-11-0-nov1`.
+
+   - **`elk-stack-version`**: Specify the Elastic Cloud stack version, either a SNAPSHOT or a build candidate (BC)
+     version. Check the available versions [here](https://artifacts-staging.elastic.co/dra-info/index.html).
+     For BC, enter only the version without additions/commit SHA, e.g. `8.11.0`.
+     For SNAPSHOT, enter the full version, e.g. `8.12.0-SNAPSHOT`.
+
+   ![Required Parameters](https://github.com/elastic/cloudbeat/assets/99176494/a50141d7-7554-4761-a737-e0f23f0b0492)
+
+4. Optionally, modify other parameters if required:
+
+   - **`docker-image-override`** (**optional**): Use this to replace the default Docker image for build candidate (BC)
+     or SNAPSHOT versions. Provide the full image path,
+     e.g. `docker.elastic.co/cloud-release/elastic-agent-cloud:8.11.0-cb971279`; leave the field blank to use the
+     default image. If you're not sure where to get this image path from, look for a message
+     like [this](https://elastic.slack.com/archives/C0JFN9HJL/p1698263174847419) in the #mission-control channel;
+     its first line specifies the stack version and the BC commit SHA,
+     e.g. `elastic / unified-release - staging # 8.11 - 10 - 8.9.0-cb971279`. Copy the commit SHA into the image
+     path: `docker.elastic.co/cloud-release/elastic-agent-cloud:8.11.0-cb971279`.
+
+   ![Optional Parameters](https://github.com/elastic/cloudbeat/assets/99176494/5b7f15bd-6f56-4eb0-b7d6-fc6a7656ffb0)
+
+## Tracking Workflow Execution
+
+Tracking workflow execution follows the same steps as described in [Create Environment](./Cloud-Env-Testing.md#tracking-workflow-execution).
+
+## Logging into the Environment
+
+Logging into the environment follows the steps detailed in [Create Environment](./Cloud-Env-Testing.md#logging-into-the-environment).
+
+## Cleanup Procedure
+
+The cleanup procedure is described in [Create Environment](./Cloud-Env-Testing.md#cleanup-procedure).
diff --git a/tests/configuration.py b/tests/configuration.py
index c98d940d32..246bfae228 100644
--- a/tests/configuration.py
+++ b/tests/configuration.py
@@ -65,6 +65,7 @@
 elasticsearch.kspm_index = os.getenv("KSPM_INDEX", FINDINGS_INDEX_PATTERN)
 elasticsearch.cspm_index = os.getenv("CSPM_INDEX", FINDINGS_INDEX_PATTERN)
 elasticsearch.cnvm_index = os.getenv("CNVM_INDEX", VULNERABILITIES_INDEX_PATTERN)
+elasticsearch.stack_version = os.getenv("STACK_VERSION", "")
 
 # --- Docker environment definition
 docker = Munch()
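One subtlety in the test changes below: `stack_version` defaults to an empty string, and most tests add the `agent.version` term unconditionally, so running the sanity suite without `STACK_VERSION` set will match no findings (hence the warning in the test module). A sketch of the failure mode:

```python
# With STACK_VERSION unset, this term is still appended, and an empty string
# never equals a real agent version, so no findings are returned.
agent_term = {"term": {"agent.version": ""}}
```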
""" - query_list = [{"term": {"rule.benchmark.id": "cis_k8s"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_k8s"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = kspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-4h") result = get_findings(kspm_client, CONFIG_TIMEOUT, query, sort, match_type) @@ -78,7 +91,11 @@ def test_kspm_e_k_s_findings(kspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_eks"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_eks"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = kspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-4h") results = get_findings(kspm_client, CONFIG_TIMEOUT, query, sort, match_type) @@ -101,7 +118,11 @@ def test_cspm_findings(cspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_aws"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_aws"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = cspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cspm_client, CONFIG_TIMEOUT, query, sort, match_type) @@ -124,7 +145,7 @@ def test_cnvm_findings(cnvm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [] + query_list = [{"term": {"agent.version": STACK_VERSION}}] query, sort = cnvm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cnvm_client, CNVM_CONFIG_TIMEOUT, query, sort, match_type) assert len(results) > 0, f"The resource type '{match_type}' is missing" @@ -146,7 +167,12 @@ def test_cspm_gcp_findings(cspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_gcp"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_gcp"}}, + {"term": {"resource.type": match_type}}, + ] + if STACK_VERSION: + query_list.append(agent_term) query, sort = cspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cspm_client, GCP_CONFIG_TIMEOUT, query, sort, match_type)