From f08b4f3c4eb823adb8bc17d609a1ca6079ee21e2 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 28 Sep 2023 15:32:09 +0300 Subject: [PATCH 01/55] add package policy infra and update gcp and kspm integrations --- .../fleet_api/pyproject.toml | 1 + .../fleet_api/src/api/common_api.py | 43 ++++ .../src/install_cspm_gcp_integration.py | 48 ++-- .../src/install_kspm_unmanaged_integration.py | 71 +++--- .../fleet_api/src/package_policy.py | 228 ++++++++++++++++++ 5 files changed, 344 insertions(+), 47 deletions(-) create mode 100644 deploy/test-environments/fleet_api/src/package_policy.py diff --git a/deploy/test-environments/fleet_api/pyproject.toml b/deploy/test-environments/fleet_api/pyproject.toml index 64b2f7f5d4..43c6c06397 100644 --- a/deploy/test-environments/fleet_api/pyproject.toml +++ b/deploy/test-environments/fleet_api/pyproject.toml @@ -12,6 +12,7 @@ munch = "^3.0.0" loguru = "^0.7.0" jinja2 = "^3.1.2" ruamel-yaml = "^0.17.31" +packaging = "^23.1" [build-system] diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py index 5bb5df0e98..bca3b26915 100644 --- a/deploy/test-environments/fleet_api/src/api/common_api.py +++ b/deploy/test-environments/fleet_api/src/api/common_api.py @@ -2,6 +2,7 @@ This module contains API calls related to Fleet settings """ import codecs +from typing import Dict, Any from munch import Munch, munchify from loguru import logger from api.base_call_api import APICallException, perform_api_call @@ -256,6 +257,48 @@ def get_package_version( return None +def get_package( + cfg: Munch, + package_name: str = "cloud_security_posture", + is_full: bool = True, + prerelease: bool = False, +) -> Dict[str, Any]: + """ + Retrieve package information from the Elastic Fleet Server API. + + Args: + cfg (Munch): Configuration data. + package_name (str, optional): The name of the package to retrieve. 
+ Default is "cloud_security_posture". + is_full (bool, optional): Whether to retrieve full package information. Default is True. + prerelease (bool, optional): Whether to include prerelease versions. Default is False. + + Returns: + Dict[str, Any]: A dictionary containing the package information + or an empty dictionary if the API call fails. + """ + url = f"{cfg.kibana_url}/api/fleet/epm/packages/{package_name}" + + request_params = { + "full": is_full, + "prerelease": prerelease, + } + + try: + response = perform_api_call( + method="GET", + url=url, + auth=cfg.auth, + params={"params": request_params}, + ) + return response.get("response", {}) + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + return {} + + def update_package_version(cfg: Munch, package_name: str, package_version: str): """ Updates the version of a package. diff --git a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py index 30d808367c..eb0a30c6dd 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py @@ -7,9 +7,9 @@ 2. Create a CSPM GCP integration. 3. Create a deploy/deployment-manager/config.json file to be used by the just deploy-dm command. 
""" +import sys import json from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -22,35 +22,41 @@ update_package_version, ) from loguru import logger -from utils import read_json from state_file_manager import state_manager, PolicyState +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) -CSPM_GCP_AGENT_POLICY = "../../../cloud/data/agent_policy_cspm_gcp.json" -CSPM_GCP_PACKAGE_POLICY = "../../../cloud/data/package_policy_cspm_gcp.json" CSPM_GCP_EXPECTED_AGENTS = 1 DEPLOYMENT_MANAGER_CONFIG = "../../../deployment-manager/config.json" -cspm_gcp_agent_policy_data = Path(__file__).parent / CSPM_GCP_AGENT_POLICY -cspm_gcp_pkg_policy_data = Path(__file__).parent / CSPM_GCP_PACKAGE_POLICY cspm_gcp_deployment_manager_config = Path(__file__).parent / DEPLOYMENT_MANAGER_CONFIG INTEGRATION_NAME = "CSPM GCP" - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. 
- """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cspm_gcp_agent_policy_data) - package_policy = read_json(json_path=cspm_gcp_pkg_policy_data) - return agent_policy, package_policy +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_gcp", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cspm-gcp"), + "input_name": "cis_gcp", + "posture": "cspm", + "deployment": "gcp", +} +AGENT_INPUT = { + "name": generate_random_name("cspm-gcp"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + logger.info(f"Package version: {package_version}") update_package_version( cfg=cnfg.elk_config, @@ -58,7 +64,11 @@ def load_data() -> Tuple[Dict, Dict]: package_version=package_version, ) logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") - agent_data, package_data = load_data() + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) diff --git a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py index 730ceb4d7c..3c7a57fa1c 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py @@ -7,9 +7,8 @@ 2. Create a KSPM unmanaged integration. 3. Create a KSPM manifest to be deployed on a host. 
""" - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -22,55 +21,71 @@ update_package_version, ) from loguru import logger -from utils import read_json from state_file_manager import state_manager, PolicyState +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) -KSPM_UNMANAGED_AGENT_POLICY = "../../../cloud/data/agent_policy_vanilla.json" -KSPM_UNMANAGED_PACKAGE_POLICY = "../../../cloud/data/package_policy_vanilla.json" -KSPM_UNMANAGED_EXPECTED_AGENTS = 2 - - -kspm_agent_policy_data = Path(__file__).parent / KSPM_UNMANAGED_AGENT_POLICY -kspm_unmanached_pkg_policy_data = Path(__file__).parent / KSPM_UNMANAGED_PACKAGE_POLICY - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. - """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=kspm_agent_policy_data) - package_policy = read_json(json_path=kspm_unmanached_pkg_policy_data) - return agent_policy, package_policy +KSPM_UNMANAGED_EXPECTED_AGENTS = 2 +INTEGRATION_NAME = "KSPM Self Managed" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_k8s", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-kspm"), + "input_name": "cis_k8s", + "posture": "kspm", + "deployment": "self_managed", +} +AGENT_INPUT = { + "name": generate_random_name("kspm-self-managed"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, 
package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of KSPM integration.") - agent_data, package_data = load_data() + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - logger.info("Create KSPM unmanaged integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_kspm_unmanaged_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, KSPM_UNMANAGED_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + KSPM_UNMANAGED_EXPECTED_AGENTS, + [], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = get_enrollment_token( @@ -81,6 +96,6 @@ def load_data() -> Tuple[Dict, Dict]: manifest_params.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config) manifest_params.yaml_path = Path(__file__).parent / "kspm_unmanaged.yaml" manifest_params.docker_image_override = cnfg.kspm_config.docker_image_override - logger.info("Creating KSPM unmanaged manifest") + logger.info(f"Creating {INTEGRATION_NAME} manifest") create_kubernetes_manifest(cfg=cnfg.elk_config, params=manifest_params) - logger.info("Installation of KSPM integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} is done") diff --git a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py new file mode 100644 index 0000000000..bf93a1d6ae --- /dev/null +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -0,0 +1,228 @@ +""" +This module provides functions for generating and formatting policy 
templates +and inputs based on provided data and templates. +""" + +import copy +import uuid +from typing import Dict, Tuple +from packaging import version +from munch import Munch +from loguru import logger +from api.common_api import get_package + +# Constants +CLOUD_SECURITY_POSTURE = "cloud_security_posture" +REQUIRE_VARS = ["cloudbeat/cis_aws", "cloudbeat/cis_eks"] +SIMPLIFIED_PACKAGE_POLICY = { + "policy_id": "", + "package": {}, + "name": "", + "description": "", + "namespace": "default", + "inputs": {}, + "vars": {}, +} +SIMPLIFIED_AGENT_POLICY = { + "name": "", + "namespace": "default", + "monitoring_enabled": ["logs", "metrics"], +} +VERSION_MAP = { + "cis_k8s": "1.1.0", + "cis_gcp": "1.5.0", +} + + +def generate_input_id(name: str, input_type: str) -> str: + """ + Generates a unique input ID based on the provided name and input type. + + This function combines the 'name' and 'input_type' parameters to create a unique + identifier for an input. The resulting ID is useful for organizing and referencing + inputs in a structured manner. + + Args: + name (str): The name or identifier associated with the input. + input_type (str): The type or category of the input. + + Returns: + str: A unique input ID generated by combining 'name' and 'input_type'. + + Example: + If 'name' is "cspm" and 'input_type' is "cloudbeat/cis_gcp", calling + 'generate_input_id(name, input_type)' will return "cspm-cloudbeat/cis_gcp". + """ + return f"{name}-{input_type}" + + +def format_inputs(policy_templates: list) -> dict: + """ + Format inputs based on policy templates. + + Args: + policy_templates (list): List of policy templates. + + Returns: + dict: Formatted inputs. 
+ """ + inputs_dict = {} + for template in policy_templates: + name = template.get("name", "") + data_stream = template.get("data_streams", [])[0] + for template_input in template.get("inputs", []): + input_type = template_input.get("type", "") + input_dict = { + "enabled": False, + "streams": { + f"{CLOUD_SECURITY_POSTURE}.{data_stream}": { + "enabled": False, + }, + }, + } + # Conditionally add "vars" based on input_type + if input_type in REQUIRE_VARS: + input_dict["streams"][f"{CLOUD_SECURITY_POSTURE}.{data_stream}"]["vars"] = {} + inputs_dict[generate_input_id(name=name, input_type=input_type)] = input_dict + return inputs_dict + + +def format_vars(package_vars: list) -> dict: + """ + Format vars based on package vars. + + Args: + package_vars (list): List of package vars. + + Returns: + dict: Formatted vars. + """ + vars_dict = {} + for package_var in package_vars: + vars_dict[package_var.get("name", "")] = "" + return vars_dict + + +def update_input(data, input_data): + """ + Recursively updates a dictionary structure with values from another dictionary. + + Args: + data (dict or list): The dictionary structure to be updated. + input_data (dict): The dictionary containing values to update 'data' with. + """ + if isinstance(data, dict): + for key, value in data.items(): + if key == "enabled": + data[key] = True + elif key == "vars" and isinstance(value, dict): + data[key] = input_data.get("vars", {}) + elif isinstance(value, (dict, list)): + update_input(value, input_data) + elif isinstance(data, list): + for item in data: + update_input(item, input_data) + + +def generate_policy_template(cfg: Munch, policy_template: dict = None) -> dict: + """ + Generate a policy template based on configuration and a template. + + Args: + cfg (Munch): Configuration data. + policy_template (dict, optional): Policy template.If not provided, + a default template will be used. + + Returns: + dict: Generated policy template. 
+ """ + if policy_template is None: + policy_template = SIMPLIFIED_PACKAGE_POLICY + + generated_policy = copy.deepcopy(policy_template) + package_policy_info = get_package(cfg=cfg) + generated_policy["package"] = { + "name": package_policy_info.get("name", ""), + "version": package_policy_info.get("version", ""), + } + generated_policy["inputs"] = format_inputs(package_policy_info.get("policy_templates", [])) + generated_policy["vars"] = format_vars(package_vars=package_policy_info.get("vars", [])) + return generated_policy + + +def generate_package_policy(template: dict, policy_input: dict) -> dict: + """ + Generate a package policy based on a template and policy input. + + Args: + template (dict): The package policy template. + policy_input (dict): The policy input containing values to update. + + Returns: + dict: The generated package policy. + """ + package_policy = copy.deepcopy(template) + integration_key = policy_input.get("input_name", "") + for input_name, data in package_policy["inputs"].items(): + if integration_key in input_name: + update_input(data, policy_input) + package_policy["vars"]["posture"] = policy_input.get("posture", "") + package_policy["vars"]["deployment"] = policy_input.get("deployment", "") + package_policy["name"] = policy_input.get("name", "") + return package_policy + + +def load_data(cfg: Munch, agent_input: dict, package_input: dict) -> Tuple[Dict, Dict]: + """ + Load agent and package policies based on input data. + + Args: + cfg (Munch): Configuration data. + agent_input (dict): Agent policy input data. + package_input (dict): Package policy input data. + + Returns: + Tuple[Dict, Dict]: A tuple containing the loaded agent policy and package policy. 
+ """ + logger.info("Loading agent and package policies") + agent_policy = SIMPLIFIED_AGENT_POLICY + agent_policy["name"] = agent_input.get("name", "") + package_template = generate_policy_template(cfg=cfg) + package_policy = generate_package_policy( + template=package_template, + policy_input=package_input, + ) + + return agent_policy, package_policy + + +def version_compatible(current_version, required_version): + """ + Check if the current version is compatible with the required version. + + Args: + current_version (str): The current version to be checked. + required_version (str): The required version for compatibility. + + Returns: + bool: True if the current version is compatible, False otherwise. + """ + return version.parse(current_version) >= version.parse(required_version) + + +def generate_random_name(prefix: str) -> str: + """ + Generate a random name by combining a given prefix with a random suffix. + + Args: + prefix (str): The prefix to be combined with the random suffix. + + Returns: + str: The generated name consisting of the prefix and a random 6-character suffix. 
+ """ + random_uuid = str(uuid.uuid4()) + # Extract the last 6 characters from the UUID + random_suffix = random_uuid[-6:] + generated_name = f"{prefix}-{random_suffix}" + + return generated_name From a96b868f1d3a01c59f4a07b18aed90bbc97e54a9 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 28 Sep 2023 15:51:52 +0300 Subject: [PATCH 02/55] update cnvm integration package install --- .../fleet_api/src/install_cnvm_integration.py | 67 ++++++++++++------- .../fleet_api/src/package_policy.py | 1 + 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index 3b5cbc30e6..e9d50004ab 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -7,9 +7,9 @@ 2. Create a CNVM AWS integration. 3. Create a deploy/cloudformation/config.json file to be used by the just deploy-cloudformation command. 
""" +import sys import json from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -18,50 +18,69 @@ get_enrollment_token, get_fleet_server_host, get_artifact_server, + get_package_version, ) from loguru import logger -from utils import read_json from state_file_manager import state_manager, PolicyState +from package_policy import ( + version_compatible, + VERSION_MAP, + load_data, + generate_random_name, +) -CNVM_AGENT_POLICY = "../../../cloud/data/agent_policy_cnvm_aws.json" -CNVM_PACKAGE_POLICY = "../../../cloud/data/package_policy_cnvm_aws.json" CNVM_EXPECTED_AGENTS = 1 CNVM_CLOUDFORMATION_CONFIG = "../../../cloudformation/config.json" CNVM_AGENT_TAGS = ["cft_version:CFT_VERSION", "cft_arn:arn:aws:cloudformation:.*"] - -cnvm_agent_policy_data = Path(__file__).parent / CNVM_AGENT_POLICY -cnvm_pkg_policy_data = Path(__file__).parent / CNVM_PACKAGE_POLICY +PKG_DEFAULT_VERSION = VERSION_MAP.get("vuln_mgmt_aws", "") +INTEGRATION_NAME = "CNVM AWS" +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cnvm-aws"), + "input_name": "vuln_mgmt_aws", + "posture": "vuln_mgmt", + "deployment": "aws", +} +AGENT_INPUT = { + "name": generate_random_name("cnvm-aws"), +} cnvm_cloudformation_config = Path(__file__).parent / CNVM_CLOUDFORMATION_CONFIG -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. 
- """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cnvm_agent_policy_data) - package_policy = read_json(json_path=cnvm_pkg_policy_data) - return agent_policy, package_policy - - if __name__ == "__main__": # pylint: disable=duplicate-code - logger.info("Starting installation of CNVM AWS integration.") - agent_data, package_data = load_data() + package_version = get_package_version(cfg=cnfg.elk_config) + logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - logger.info("Create CNVM integration for policy", agent_policy_id) + logger.info(f"Create {INTEGRATION_NAME} integration for policy {agent_policy_id}") package_policy_id = create_cnvm_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, CNVM_EXPECTED_AGENTS, CNVM_AGENT_TAGS)) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + CNVM_EXPECTED_AGENTS, + CNVM_AGENT_TAGS, + ), + ) cloudformation_params = Munch() cloudformation_params.ENROLLMENT_TOKEN = get_enrollment_token( @@ -76,4 +95,4 @@ def load_data() -> Tuple[Dict, Dict]: with open(cnvm_cloudformation_config, "w") as file: json.dump(cloudformation_params, file) - logger.info("Installation of CNVM integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git 
a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py index bf93a1d6ae..fe0369245b 100644 --- a/deploy/test-environments/fleet_api/src/package_policy.py +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -31,6 +31,7 @@ VERSION_MAP = { "cis_k8s": "1.1.0", "cis_gcp": "1.5.0", + "vuln_mgmt_aws": "1.3.0", } From f517be82e16de222aa3db3492d7c8003369bcaec Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 28 Sep 2023 16:30:12 +0300 Subject: [PATCH 03/55] update kspm eks integration package install --- .../src/install_kspm_eks_integration.py | 83 +++++++++++-------- .../fleet_api/src/package_policy.py | 1 + 2 files changed, 50 insertions(+), 34 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py index 830cc5c6c9..3a144926a6 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py @@ -7,9 +7,8 @@ 2. Create a KSPM EKS integration. 3. Create a KSPM manifest to be deployed on a host. 
""" - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy, get_agent_policy_id_by_name @@ -22,42 +21,57 @@ update_package_version, ) from loguru import logger -from utils import read_json from state_file_manager import state_manager, PolicyState +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + VERSION_MAP, +) + -KSPM_EKS_AGENT_POLICY = "../../../cloud/data/agent_policy_eks.json" -KSPM_EKS_PACKAGE_POLICY = "../../../cloud/data/package_policy_eks.json" KSPM_EKS_EXPECTED_AGENTS = 2 D4C_AGENT_POLICY_NAME = "tf-ap-d4c" - -kspm_agent_policy_data = Path(__file__).parent / KSPM_EKS_AGENT_POLICY -kspm_eks_pkg_policy_data = Path(__file__).parent / KSPM_EKS_PACKAGE_POLICY - - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. - """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=kspm_agent_policy_data) - package_policy = read_json(json_path=kspm_eks_pkg_policy_data) - return agent_policy, package_policy - +INTEGRATION_NAME = "KSPM EKS" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_eks", "") +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-kspm-eks"), + "input_name": "cis_eks", + "posture": "kspm", + "deployment": "cloudbeat/cis_eks", + "vars": { + "access_key_id": cnfg.aws_config.access_key_id, + "secret_access_key": cnfg.aws_config.secret_access_key, + "aws.credentials.type": "direct_access_keys", + }, +} +AGENT_INPUT = { + "name": generate_random_name("kspm-eks"), +} if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + 
logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of KSPM EKS integration.") - agent_data, package_data = load_data() + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = get_agent_policy_id_by_name( @@ -70,21 +84,22 @@ def load_data() -> Tuple[Dict, Dict]: json_policy=agent_data, ) - aws_config = cnfg.aws_config - eks_data = { - "access_key_id": aws_config.access_key_id, - "secret_access_key": aws_config.secret_access_key, - } - - logger.info("Create KSPM EKS integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_kspm_eks_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, - eks_data=eks_data, + eks_data={}, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, KSPM_EKS_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + KSPM_EKS_EXPECTED_AGENTS, + [], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = get_enrollment_token( @@ -95,6 +110,6 @@ def load_data() -> Tuple[Dict, Dict]: manifest_params.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config) manifest_params.yaml_path = Path(__file__).parent / "kspm_eks.yaml" manifest_params.docker_image_override = cnfg.kspm_config.docker_image_override - logger.info("Creating KSPM EKS manifest") + logger.info(f"Creating {INTEGRATION_NAME} manifest") create_kubernetes_manifest(cfg=cnfg.elk_config, params=manifest_params) - logger.info("Installation of KSPM EKS integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} integration is 
done") diff --git a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py index fe0369245b..73f4f89804 100644 --- a/deploy/test-environments/fleet_api/src/package_policy.py +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -30,6 +30,7 @@ } VERSION_MAP = { "cis_k8s": "1.1.0", + "cis_eks": "1.2.0", "cis_gcp": "1.5.0", "vuln_mgmt_aws": "1.3.0", } From f31b483a0f43d7ddd48dd740d956f76324ea1094 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 28 Sep 2023 17:59:43 +0300 Subject: [PATCH 04/55] update cspm aws integration package install --- .../fleet_api/src/install_cspm_integration.py | 92 +++++++++++-------- .../fleet_api/src/package_policy.py | 22 ++++- 2 files changed, 76 insertions(+), 38 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/install_cspm_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_integration.py index 70c7c2e919..e9f82d832f 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_integration.py @@ -7,9 +7,8 @@ 2. Create a CSPM AWS integration. 3. Create a CSPM bash script to be deployed on a host. 
""" - +import sys from pathlib import Path -from typing import Dict, Tuple from munch import Munch import configuration_fleet as cnfg from api.agent_policy_api import create_agent_policy @@ -22,65 +21,84 @@ update_package_version, ) from loguru import logger -from utils import ( - read_json, - render_template, -) +from utils import render_template from state_file_manager import state_manager, PolicyState +from package_policy import ( + load_data, + version_compatible, + generate_random_name, + patch_vars, + VERSION_MAP, +) -CSPM_AGENT_POLICY = "../../../cloud/data/agent_policy_cspm_aws.json" -CSPM_PACKAGE_POLICY = "../../../cloud/data/package_policy_cspm_aws.json" CSPM_EXPECTED_AGENTS = 1 +INTEGRATION_NAME = "CSPM AWS" +PKG_DEFAULT_VERSION = VERSION_MAP.get("cis_aws", "") +aws_config = cnfg.aws_config +INTEGRATION_INPUT = { + "name": generate_random_name("pkg-cspm-aws"), + "input_name": "cis_aws", + "posture": "cspm", + "deployment": "cloudbeat/cis_aws", + "vars": { + "access_key_id": aws_config.access_key_id, + "secret_access_key": aws_config.secret_access_key, + "aws.credentials.type": "direct_access_keys", + }, +} +AGENT_INPUT = { + "name": generate_random_name("cspm-aws"), +} -cspm_agent_policy_data = Path(__file__).parent / CSPM_AGENT_POLICY -cspm_pkg_policy_data = Path(__file__).parent / CSPM_PACKAGE_POLICY cspm_template = Path(__file__).parent / "data/cspm-linux.j2" - -def load_data() -> Tuple[Dict, Dict]: - """Loads data. - - Returns: - Tuple[Dict, Dict]: A tuple containing the loaded agent and package policies. 
- """ - logger.info("Loading agent and package policies") - agent_policy = read_json(json_path=cspm_agent_policy_data) - package_policy = read_json(json_path=cspm_pkg_policy_data) - return agent_policy, package_policy - - if __name__ == "__main__": # pylint: disable=duplicate-code package_version = get_package_version(cfg=cnfg.elk_config) logger.info(f"Package version: {package_version}") + if not version_compatible( + current_version=package_version, + required_version=PKG_DEFAULT_VERSION, + ): + logger.warning(f"{INTEGRATION_NAME} is not supported in version {package_version}") + sys.exit(0) + update_package_version( cfg=cnfg.elk_config, package_name="cloud_security_posture", package_version=package_version, ) - logger.info("Starting installation of CSPM AWS integration.") - agent_data, package_data = load_data() + patch_vars( + var_dict=INTEGRATION_INPUT.get("vars", {}), + package_version=package_version, + ) + logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") + agent_data, package_data = load_data( + cfg=cnfg.elk_config, + agent_input=AGENT_INPUT, + package_input=INTEGRATION_INPUT, + ) logger.info("Create agent policy") agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data) - aws_config = cnfg.aws_config - cspm_data = { - "access_key_id": aws_config.access_key_id, - "secret_access_key": aws_config.secret_access_key, - "aws.credentials.type": "direct_access_keys", - } - - logger.info("Create CSPM integration") + logger.info(f"Create {INTEGRATION_NAME} integration") package_policy_id = create_cspm_integration( cfg=cnfg.elk_config, pkg_policy=package_data, agent_policy_id=agent_policy_id, - cspm_data=cspm_data, + cspm_data={}, ) - state_manager.add_policy(PolicyState(agent_policy_id, package_policy_id, CSPM_EXPECTED_AGENTS, [])) + state_manager.add_policy( + PolicyState( + agent_policy_id, + package_policy_id, + CSPM_EXPECTED_AGENTS, + [], + ), + ) manifest_params = Munch() manifest_params.enrollment_token = 
get_enrollment_token( @@ -96,9 +114,9 @@ def load_data() -> Tuple[Dict, Dict]: # Render the template and get the replaced content rendered_content = render_template(cspm_template, manifest_params.toDict()) - logger.info("Creating CSPM linux manifest") + logger.info(f"Creating {INTEGRATION_NAME} linux manifest") # Write the rendered content to a file with open(Path(__file__).parent / "cspm-linux.sh", "w", encoding="utf-8") as cspm_file: cspm_file.write(rendered_content) - logger.info("Installation of CSPM integration is done") + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py index 73f4f89804..65e70ebf1d 100644 --- a/deploy/test-environments/fleet_api/src/package_policy.py +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -31,8 +31,9 @@ VERSION_MAP = { "cis_k8s": "1.1.0", "cis_eks": "1.2.0", - "cis_gcp": "1.5.0", + "cis_aws": "1.2.0", "vuln_mgmt_aws": "1.3.0", + "cis_gcp": "1.5.0", } @@ -228,3 +229,22 @@ def generate_random_name(prefix: str) -> str: generated_name = f"{prefix}-{random_suffix}" return generated_name + + +def patch_vars(var_dict, package_version): + """ + Conditionally updates a dictionary based on the package version. + + This function checks the provided package version and updates the given + dictionary 'var_dict' with additional fields based on version requirements. + + Args: + var_dict (dict): The dictionary to be updated. + package_version (str): The version of the package to determine updates. + + Returns: + None: This function modifies 'var_dict' in place. 
+ """ + if version.parse(package_version) >= version.parse("1.5.0"): + # Add or update fields in the vars_dict based on the version requirements + var_dict["aws.account_type"] = "single-account" From 74a0d8f4b5e4bcfc8dbfa1c25c70adb0719a6cde Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 3 Oct 2023 10:51:17 +0300 Subject: [PATCH 05/55] add upgrade flow --- .github/workflows/test-environment.yml | 59 +++++++-- .github/workflows/upgrade-environment.yml | 154 ++++++++++++++++++++++ 2 files changed, 202 insertions(+), 11 deletions(-) create mode 100644 .github/workflows/upgrade-environment.yml diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index e9634ab14c..275fd604c6 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -33,6 +33,39 @@ on: type: string description: "**Optional** By default, the environment will be created in our Cloud Security Organization. If you want to use your own cloud account, enter your Elastic Cloud API key." required: false + workflow_call: + inputs: + deployment_name: + description: Name of the deployment to create + required: true + elk-stack-version: + required: true + description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" + default: "8.10.0" + ess-region: + required: true + description: "Elastic Cloud deployment region" + default: "gcp-us-west2" + docker-image-override: + required: false + description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + run-sanity-tests: + description: "Run sanity tests after provision" + default: false + type: boolean + cleanup-env: + description: "Cleanup resources after provision" + default: false + type: boolean + ec-api-key: + type: string + description: "**Optional** By default, the environment will be created in our Cloud Security Organization. 
If you want to use your own cloud account, enter your Elastic Cloud API key." + required: false + outputs: + s3-bucket: + description: "Terraform state s3 bucket folder" + value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }} + env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} @@ -52,17 +85,19 @@ jobs: run: working-directory: ${{ env.WORKING_DIR }} env: - TF_VAR_stack_version: ${{ github.event.inputs.elk-stack-version }} - TF_VAR_ess_region: ${{ github.event.inputs.ess-region }} - DEPLOYMENT_NAME: ${{ github.event.inputs.deployment_name }} + TF_VAR_stack_version: ${{ inputs.elk-stack-version }} + TF_VAR_ess_region: ${{ inputs.ess-region }} + DEPLOYMENT_NAME: ${{ inputs.deployment_name }} S3_BASE_BUCKET: "s3://tf-state-bucket-test-infra" - DOCKER_IMAGE_OVERRIDE: ${{ github.event.inputs.docker-image-override }} - STACK_VERSION: ${{ github.event.inputs.elk-stack-version }} - CNVM_STACK_NAME: "${{ github.event.inputs.deployment_name }}-cnvm-sanity-test-stack" + DOCKER_IMAGE_OVERRIDE: ${{ inputs.docker-image-override }} + STACK_VERSION: ${{ inputs.elk-stack-version }} + CNVM_STACK_NAME: "${{ inputs.deployment_name }}-cnvm-sanity-test-stack" # Add "id-token" with the intended permissions. 
permissions: contents: 'read' id-token: 'write' + outputs: + deploy-s3-bucket: ${{ steps.upload-state.outputs.s3-bucket-folder }} steps: - name: Check out the repo uses: actions/checkout@v4 @@ -73,7 +108,7 @@ jobs: - name: Check Deployment Name run: | - deployment_name="${{ github.event.inputs.deployment_name }}" + deployment_name="${{ inputs.deployment_name }}" # Check length if [ ${#deployment_name} -gt 20 ]; then @@ -88,7 +123,7 @@ jobs: fi - name: Mask Sensitive Data - if: github.event.inputs.ec-api-key != '' + if: inputs.ec-api-key != '' run: | ec_api_key=$(jq -r '.inputs["ec-api-key"]' $GITHUB_EVENT_PATH) echo "::add-mask::$ec_api_key" @@ -175,6 +210,7 @@ jobs: echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV - name: Upload tf state + id: upload-state if: always() env: S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}" @@ -182,6 +218,7 @@ jobs: aws s3 cp "./terraform.tfstate" "${{ env.S3_BUCKET }}/terraform.tfstate" aws s3 cp "${{ env.EC2_CSPM_KEY }}" "${{ env.S3_BUCKET }}/cspm.pem" aws s3 cp "${{ env.EC2_KSPM_KEY }}" "${{ env.S3_BUCKET }}/kspm.pem" + echo "s3-bucket-folder=${{ env.S3_BUCKET }}" >> $GITHUB_OUTPUT - name: Summary if: success() @@ -301,19 +338,19 @@ jobs: poetry run python src/agents_enrolled.py - name: Run Sanity checks - if: ${{ success() && github.event.inputs.run-sanity-tests == 'true' }} + if: ${{ success() && inputs.run-sanity-tests == 'true' }} working-directory: ./tests run: | poetry install poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 - name: Cleanup Environment - if: github.event.inputs.cleanup-env == 'true' + if: inputs.cleanup-env == 'true' run: | just delete-cloud-env ${{ env.DEPLOYMENT_NAME }} '' "false" - name: Tag CNVM Instance - if: github.event.inputs.cleanup-env == 'false' + if: inputs.cleanup-env == 'false' env: STACK_NAME: "${{ env.CNVM_STACK_NAME}}" run: just create-cnvm-stack-tags ${{ env.AWS_REGION}} ${{ env.STACK_NAME }} '${{ 
env.AWS_DEFAULT_TAGS }} Key=owner,Value=${{ github.actor }}' diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml new file mode 100644 index 0000000000..1a007b69ef --- /dev/null +++ b/.github/workflows/upgrade-environment.yml @@ -0,0 +1,154 @@ +name: Test Upgrade Environment +run-name: Creating ${{ github.event.inputs.deployment_name }} by @${{ github.actor }} + +on: + # Ability to execute on demand + workflow_dispatch: + inputs: + deployment_name: + type: string + description: | + Name with letters, numbers, hyphens; start with a letter. Max 20 chars. e.g., 'my-env-123' + required: true + elk-stack-version: + required: true + description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" + default: "8.11.0" + docker-image-override: + required: false + description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: "eu-west-1" + WORKING_DIR: deploy/test-environments + TF_VAR_stack_version: ${{ inputs.elk-stack-version }} + TF_VAR_ess_region: gcp-us-west2 + +jobs: + init: + runs-on: ubuntu-20.04 + outputs: + stack-version: ${{ steps.set-previous-version.outputs.PREVIOUS_VERSION }} + ess-region: ${{ env.TF_VAR_ess_region }} + steps: + - name: Set Previous Version + id: set-previous-version + run: | + VERSION="${{ inputs.elk-stack-version }}" + + # Extract the major and minor versions + MAJOR_VERSION=$(echo $VERSION | cut -d'.' -f1) + MINOR_VERSION=$(echo $VERSION | cut -d'.' 
-f2) + + # Calculate the previous version (assuming it's always X.(Y-1)) + PREVIOUS_VERSION="$MAJOR_VERSION.$((MINOR_VERSION - 1))" + echo $PREVIOUS_VERSION + echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" >> $GITHUB_OUTPUT + deploy: + uses: ./.github/workflows/test-environment.yml + needs: init + with: + deployment_name: ${{ inputs.deployment_name }} + elk-stack-version: ${{ needs.init.outputs.stack-version }} + ess-region: ${{ env.TF_VAR_ess_region }} + docker-image-override: ${{ inputs.docker-image-override }} + run-sanity-tests: true + upgrade: + runs-on: ubuntu-20.04 + needs: deploy + timeout-minutes: 120 + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Init Hermit + run: ./bin/hermit env -r >> $GITHUB_ENV + working-directory: ./ + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + poetry --version + + - name: Install Fleet API dependencies + id: fleet-api-deps + working-directory: ${{ env.WORKING_DIR }}/fleet_api + run: | + poetry install + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v2 + with: + aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ env.AWS_REGION }} + + - name: Download tf state data + env: + S3_BUCKET: ${{ needs.deploy.outputs.deploy-s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" + + - name: Terraform Init + run: terraform init + + - name: Terraform Validate + run: terraform validate + + - name: Update ELK stack version + id: apply + if: success() + run: | + terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" + + - name: Set Environment Output + id: env-output + run: | + echo "KIBANA_URL=$(terraform 
output -raw kibana_url)" >> $GITHUB_ENV + echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV + echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV + + export ES_PASSWORD=$(terraform output -raw elasticsearch_password) + echo "::add-mask::$ES_PASSWORD" + echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV + + export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) + echo "::add-mask::$EC2_CSPM" + echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV + + export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) + echo "::add-mask::$EC2_KSPM" + echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV + + export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) + echo "::add-mask::$EC2_CSPM_KEY" + echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV + + export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) + echo "::add-mask::$EC2_KSPM_KEY" + echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV + + export KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) + echo "::add-mask::$KSPM_PUBLIC_IP" + echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV + + export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) + echo "::add-mask::$CSPM_PUBLIC_IP" + echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV + + - name: Run Sanity checks + if: success() + working-directory: ./tests + run: | + poetry install + poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 From 826521337bd1f39fc4dcd380dc611eab0f0faf77 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 3 Oct 2023 18:15:43 +0300 Subject: [PATCH 06/55] added support for installing CNVM with correct template version --- .../fleet_api/src/api/base_call_api.py | 30 ++++++++++ .../fleet_api/src/install_cnvm_integration.py | 22 ++++++++ .../fleet_api/src/package_policy.py | 55 +++++++++++++++++++ .../test-environments/fleet_api/src/utils.py | 25 +++++++++ 4 files changed, 132 insertions(+) diff --git 
a/deploy/test-environments/fleet_api/src/api/base_call_api.py b/deploy/test-environments/fleet_api/src/api/base_call_api.py index 60af1a082e..df19e2d50c 100644 --- a/deploy/test-environments/fleet_api/src/api/base_call_api.py +++ b/deploy/test-environments/fleet_api/src/api/base_call_api.py @@ -10,6 +10,7 @@ - requests: Library for making HTTP requests """ import requests +from loguru import logger class APICallException(Exception): @@ -69,3 +70,32 @@ def perform_api_call(method, url, headers=None, auth=None, params=None): raise APICallException(response.status_code, response.text) return response.json() + + +def download_file(url, destination, timeout=30): + """ + Download a file from a URL and save it to the specified destination. + + Args: + url (str): The URL of the file to download. + destination (str): The path where the downloaded file will be saved. + timeout (int, optional): The maximum time (in seconds) to wait for the server's response. + Defaults to 30 seconds. + + Raises: + APICallException: If there's an issue with the HTTP request. + IOError: If there's an issue with saving the downloaded file. 
+ """ + try: + response = requests.get(url, stream=True, timeout=timeout) + response.raise_for_status() + + with open(destination, "wb") as file: + for chunk in response.iter_content(chunk_size=8192): + file.write(chunk) + + logger.info(f"File downloaded to {destination}") + except requests.exceptions.RequestException as ex: + raise APICallException(500, f"HTTP Request Error: {ex}") from ex + except IOError as io_ex: + raise IOError(f"IO Error: {io_ex}") from io_ex diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index e9d50004ab..8dfc8d7c54 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -20,6 +20,7 @@ get_artifact_server, get_package_version, ) +from api.base_call_api import download_file from loguru import logger from state_file_manager import state_manager, PolicyState from package_policy import ( @@ -27,10 +28,14 @@ VERSION_MAP, load_data, generate_random_name, + get_package_default_url, + extract_template_url, ) +from utils import rename_file_by_suffix CNVM_EXPECTED_AGENTS = 1 CNVM_CLOUDFORMATION_CONFIG = "../../../cloudformation/config.json" +CNMV_TEMPLATE = "../../../cloudformation/elastic-agent-ec2-cnvm.yml" CNVM_AGENT_TAGS = ["cft_version:CFT_VERSION", "cft_arn:arn:aws:cloudformation:.*"] PKG_DEFAULT_VERSION = VERSION_MAP.get("vuln_mgmt_aws", "") INTEGRATION_NAME = "CNVM AWS" @@ -44,6 +49,7 @@ "name": generate_random_name("cnvm-aws"), } cnvm_cloudformation_config = Path(__file__).parent / CNVM_CLOUDFORMATION_CONFIG +cnvm_cloudformation_template = Path(__file__).parent / CNMV_TEMPLATE if __name__ == "__main__": @@ -95,4 +101,20 @@ with open(cnvm_cloudformation_config, "w") as file: json.dump(cloudformation_params, file) + logger.info(f"Get {INTEGRATION_NAME} template") + default_url = get_package_default_url( + cfg=cnfg.elk_config, + 
policy_name=INTEGRATION_INPUT["posture"], + policy_type="cloudbeat/vuln_mgmt_aws", + ) + template_url = extract_template_url(url_string=default_url) + + logger.info(f"Using {template_url} for stack creation") + if template_url: + rename_file_by_suffix( + file_path=cnvm_cloudformation_template, + suffix="-orig", + ) + download_file(url=template_url, destination=cnvm_cloudformation_template) + logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py index 65e70ebf1d..fb0effee79 100644 --- a/deploy/test-environments/fleet_api/src/package_policy.py +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -6,6 +6,7 @@ import copy import uuid from typing import Dict, Tuple +from urllib.parse import urlparse, parse_qs from packaging import version from munch import Munch from loguru import logger @@ -248,3 +249,57 @@ def patch_vars(var_dict, package_version): if version.parse(package_version) >= version.parse("1.5.0"): # Add or update fields in the vars_dict based on the version requirements var_dict["aws.account_type"] = "single-account" + + +def get_package_default_url(cfg: Munch, policy_name: str, policy_type: str) -> str: + """ + Get the package default URL for a specific policy and policy type from the configuration. + + Args: + cfg (Munch): The configuration containing policy information. + policy_name (str): The name of the policy. + policy_type (str): The type of the policy. + + Returns: + str: The default package URL for the specified policy and type. + An empty string is returned if not found. 
+ """ + package_policy = get_package(cfg=cfg) + policy_templates = package_policy.get("policy_templates", []) + + for template in policy_templates: + if template.get("name", "") == policy_name: + inputs = template.get("inputs", []) + + for policy_input in inputs: + if policy_input.get("type", "") == policy_type: + vars_list = policy_input.get("vars", []) + + if vars_list: + return vars_list[0].get("default", "") + + return "" + + +def extract_template_url(url_string: str) -> str: + """ + Extracts the 'templateURL' parameter from a given URL string. + + Args: + url_string (str): The URL string from which to extract the 'templateURL' parameter. + + Returns: + str: The value of the 'templateURL' parameter if found in the URL, + or empty string if the parameter is not present. + + """ + parsed_url = urlparse(url_string, allow_fragments=False) + query_parameters = parse_qs(parsed_url.query) + + template_url = query_parameters.get("templateURL") + + if not template_url: + logger.warning("templateURL field is not found") + return "" + + return template_url[0] diff --git a/deploy/test-environments/fleet_api/src/utils.py b/deploy/test-environments/fleet_api/src/utils.py index db6ae30b05..9f552487ad 100644 --- a/deploy/test-environments/fleet_api/src/utils.py +++ b/deploy/test-environments/fleet_api/src/utils.py @@ -241,3 +241,28 @@ def add_capabilities(yaml_content: str) -> str: modified_content = output_stream.getvalue() return modified_content + + +def rename_file_by_suffix(file_path: Path, suffix: str) -> None: + """ + Rename a file by adding a specified suffix to its filename. + + Args: + file_path (Path): The path to the file to be renamed. + suffix (str): The suffix to be added to the filename. 
+ + Returns: + None + """ + if not file_path.exists(): + logger.warning(f"File {file_path.name} not found") + return + + try: + new_name = f"{file_path.stem}{suffix}{file_path.suffix}" + new_file_path = file_path.parent / new_name + Path(file_path).rename(new_file_path) + except FileNotFoundError: + logger.warning(f"File {file_path.name} not found") + except FileExistsError: + logger.warning(f"File {new_file_path} already exists") From db1257207a160f169da691577c685331cc3374d5 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 4 Oct 2023 11:36:58 +0300 Subject: [PATCH 07/55] update reuse workflow inputs --- .github/workflows/upgrade-environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 1a007b69ef..f58e5b2dc9 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -52,7 +52,7 @@ jobs: with: deployment_name: ${{ inputs.deployment_name }} elk-stack-version: ${{ needs.init.outputs.stack-version }} - ess-region: ${{ env.TF_VAR_ess_region }} + ess-region: ${{ needs.init.outputs.ess-region }} docker-image-override: ${{ inputs.docker-image-override }} run-sanity-tests: true upgrade: From a3ab3ee00033f3cfcc237efbe095707fec61f83b Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 4 Oct 2023 11:46:26 +0300 Subject: [PATCH 08/55] update inputs type --- .github/workflows/test-environment.yml | 5 +++++ .github/workflows/upgrade-environment.yml | 1 + 2 files changed, 6 insertions(+) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index 275fd604c6..c5bec20b78 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -14,13 +14,16 @@ on: required: true description: "Stack version: For released/BC version use 8.x.y, 
for SNAPSHOT use 8.x.y-SNAPSHOT" default: "8.10.0" + type: string ess-region: required: true description: "Elastic Cloud deployment region" default: "gcp-us-west2" + type: string docker-image-override: required: false description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + type: string run-sanity-tests: description: "Run sanity tests after provision" default: false @@ -37,11 +40,13 @@ on: inputs: deployment_name: description: Name of the deployment to create + type: string required: true elk-stack-version: required: true description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" default: "8.10.0" + type: string ess-region: required: true description: "Elastic Cloud deployment region" diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index f58e5b2dc9..0a39584329 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -14,6 +14,7 @@ on: required: true description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" default: "8.11.0" + type: string docker-image-override: required: false description: "Provide the full Docker image path to override the default image (e.g. 
for testing BC/SNAPSHOT)" From 51b360f42ee8565729b370e61af49ab558a136db Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 4 Oct 2023 11:50:47 +0300 Subject: [PATCH 09/55] update additional input types --- .github/workflows/test-environment.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index c5bec20b78..34a980a5d7 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -51,9 +51,11 @@ on: required: true description: "Elastic Cloud deployment region" default: "gcp-us-west2" + type: string docker-image-override: required: false description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)" + type: string run-sanity-tests: description: "Run sanity tests after provision" default: false @@ -70,6 +72,7 @@ on: s3-bucket: description: "Terraform state s3 bucket folder" value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }} + type: string env: From bba54190781acafe6460b1a429adcc1a19b0f2d9 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 4 Oct 2023 11:52:29 +0300 Subject: [PATCH 10/55] update additional input types --- .github/workflows/test-environment.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index 34a980a5d7..72d683a2fd 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -72,8 +72,6 @@ on: s3-bucket: description: "Terraform state s3 bucket folder" value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }} - type: string - env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} From e44b3e8bd0947ad09f8c649085d8b40b1685e1d4 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 4 Oct 2023 13:26:57 
+0300 Subject: [PATCH 11/55] add secrets reuse workflow --- .github/workflows/upgrade-environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 0a39584329..40806d25ab 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -56,6 +56,7 @@ jobs: ess-region: ${{ needs.init.outputs.ess-region }} docker-image-override: ${{ inputs.docker-image-override }} run-sanity-tests: true + secrets: inherit upgrade: runs-on: ubuntu-20.04 needs: deploy From ff4c2f0d7a01c78e6f7e30751991ddc390824881 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 12 Oct 2023 13:43:22 +0300 Subject: [PATCH 12/55] update release version calculation --- .github/workflows/upgrade-environment.yml | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 40806d25ab..9259fcf4c9 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -45,7 +45,28 @@ jobs: # Calculate the previous version (assuming it's always X.(Y-1)) PREVIOUS_VERSION="$MAJOR_VERSION.$((MINOR_VERSION - 1))" - echo $PREVIOUS_VERSION + + URL="https://snapshots.elastic.co/latest/$PREVIOUS_VERSION.json" + + # Use curl to fetch the JSON data + JSON_RESPONSE=$(curl -s "$URL") + + # Get latest snapshot version + SNAPSHOT_VERSION=$(echo "$JSON_RESPONSE" | jq -r '.version') + + # Split the version into major, minor, and patch parts + IFS='.-' read -ra PARTS <<< "$SNAPSHOT_VERSION" + MAJOR="${PARTS[0]}" + MINOR="${PARTS[1]}" + PATCH="${PARTS[2]}" + + # Decrement the patch version by 1 + PATCH=$((PATCH - 1)) + + # Format the previous version + PREVIOUS_VERSION="$MAJOR.$MINOR.$PATCH" + echo "Current Version: $VERSION" + echo "Latest Released Version: $PREVIOUS_VERSION" echo 
"PREVIOUS_VERSION=$PREVIOUS_VERSION" >> $GITHUB_OUTPUT deploy: uses: ./.github/workflows/test-environment.yml From 655734c1d83f958ca2d0cc7cd30ec91c3ad50f76 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 12 Oct 2023 13:56:45 +0300 Subject: [PATCH 13/55] remove usage of override docker image for released version provision --- .github/workflows/upgrade-environment.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 9259fcf4c9..5c38ef30e3 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -75,7 +75,6 @@ jobs: deployment_name: ${{ inputs.deployment_name }} elk-stack-version: ${{ needs.init.outputs.stack-version }} ess-region: ${{ needs.init.outputs.ess-region }} - docker-image-override: ${{ inputs.docker-image-override }} run-sanity-tests: true secrets: inherit upgrade: From c42377673c603ed54984df85a4bfa6e43bc6e6c7 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 15 Oct 2023 17:15:57 +0300 Subject: [PATCH 14/55] add custom tags support --- .../fleet_api/src/api/base_call_api.py | 34 ++---------- .../fleet_api/src/api/common_api.py | 40 +++++++++++++- .../fleet_api/src/configuration_fleet.py | 9 ++++ .../fleet_api/src/install_cnvm_integration.py | 9 +++- .../test-environments/fleet_api/src/utils.py | 52 +++++++++++++++++++ 5 files changed, 110 insertions(+), 34 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/api/base_call_api.py b/deploy/test-environments/fleet_api/src/api/base_call_api.py index df19e2d50c..61b63283a8 100644 --- a/deploy/test-environments/fleet_api/src/api/base_call_api.py +++ b/deploy/test-environments/fleet_api/src/api/base_call_api.py @@ -10,7 +10,6 @@ - requests: Library for making HTTP requests """ import requests -from loguru import logger class 
APICallException(Exception): @@ -34,7 +33,7 @@ def __init__(self, status_code, response_text): self.response_text = response_text -def perform_api_call(method, url, headers=None, auth=None, params=None): +def perform_api_call(method, url, return_json=True, headers=None, auth=None, params=None): """ Perform an API call using the provided parameters. @@ -69,33 +68,6 @@ def perform_api_call(method, url, headers=None, auth=None, params=None): if response.status_code != 200: raise APICallException(response.status_code, response.text) + if not return_json: + return response.content return response.json() - - -def download_file(url, destination, timeout=30): - """ - Download a file from a URL and save it to the specified destination. - - Args: - url (str): The URL of the file to download. - destination (str): The path where the downloaded file will be saved. - timeout (int, optional): The maximum time (in seconds) to wait for the server's response. - Defaults to 30 seconds. - - Raises: - APICallException: If there's an issue with the HTTP request. - IOError: If there's an issue with saving the downloaded file. 
- """ - try: - response = requests.get(url, stream=True, timeout=timeout) - response.raise_for_status() - - with open(destination, "wb") as file: - for chunk in response.iter_content(chunk_size=8192): - file.write(chunk) - - logger.info(f"File downloaded to {destination}") - except requests.exceptions.RequestException as ex: - raise APICallException(500, f"HTTP Request Error: {ex}") from ex - except IOError as io_ex: - raise IOError(f"IO Error: {io_ex}") from io_ex diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py index bca3b26915..a74b031280 100644 --- a/deploy/test-environments/fleet_api/src/api/common_api.py +++ b/deploy/test-environments/fleet_api/src/api/common_api.py @@ -6,7 +6,11 @@ from munch import Munch, munchify from loguru import logger from api.base_call_api import APICallException, perform_api_call -from utils import replace_image_field, add_capabilities +from utils import ( + replace_image_field, + add_capabilities, + add_tags, +) AGENT_ARTIFACT_SUFFIX = "/downloads/beats/elastic-agent" @@ -117,6 +121,40 @@ def create_kubernetes_manifest(cfg: Munch, params: Munch): return +def get_cnvm_template(url: str, template_path: str, cnvm_tags: str): + """ + Download a CloudFormation template from a specified URL, + add custom tags to it, and save it to a file. + + Args: + url (str): The URL to download the CloudFormation template. + template_path (str): The file path where the modified template will be saved. + cnvm_tags (str): Custom tags to be added to the template in the format "key1=value1 key2=value2 ...". + + Returns: + None + + Raises: + APICallException: If there's an issue with the API call. 
+ """ + try: + template_yaml = perform_api_call( + method="GET", + url=url, + return_json=False, + ) + template_yaml = add_tags(tags=cnvm_tags, yaml_content=template_yaml) + + with codecs.open(template_path, "w", encoding="utf-8") as cnvm_yaml: + cnvm_yaml.write(template_yaml) + logger.info(f"CNVM template is available at: '{template_path}'") + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + return + + def get_build_info(version: str) -> str: """ Retrieve the build ID for a specific version of Elastic. diff --git a/deploy/test-environments/fleet_api/src/configuration_fleet.py b/deploy/test-environments/fleet_api/src/configuration_fleet.py index 5f66b6d80b..eec382d1bb 100644 --- a/deploy/test-environments/fleet_api/src/configuration_fleet.py +++ b/deploy/test-environments/fleet_api/src/configuration_fleet.py @@ -18,6 +18,14 @@ import os from munch import Munch +CNVM_TAGS = ( + "Key=division,Value=engineering " + "Key=org,Value=security " + "Key=team,Value=cloud-security-posture " + "Key=project,Value=test-environments" +) + + elk_config = Munch() elk_config.user = os.getenv("ES_USER", "NA") elk_config.password = os.getenv("ES_PASSWORD", "NA") @@ -31,6 +39,7 @@ aws_config = Munch() aws_config.access_key_id = os.getenv("AWS_ACCESS_KEY_ID", "NA") aws_config.secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY", "NA") +aws_config.cnvm_tags = os.getenv("AWS_CNVM_TAGS", CNVM_TAGS) gcp_dm_config = Munch() gcp_dm_config.deployment_name = os.getenv("DEPLOYMENT_NAME", "") diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index 8dfc8d7c54..3771ef6054 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -19,8 +19,8 @@ get_fleet_server_host, get_artifact_server, get_package_version, 
+ get_cnvm_template, ) -from api.base_call_api import download_file from loguru import logger from state_file_manager import state_manager, PolicyState from package_policy import ( @@ -36,6 +36,7 @@ CNVM_EXPECTED_AGENTS = 1 CNVM_CLOUDFORMATION_CONFIG = "../../../cloudformation/config.json" CNMV_TEMPLATE = "../../../cloudformation/elastic-agent-ec2-cnvm.yml" +CNMV_TEMP_FILE = "elastic-agent-ec2-cnvm-temp.yml" CNVM_AGENT_TAGS = ["cft_version:CFT_VERSION", "cft_arn:arn:aws:cloudformation:.*"] PKG_DEFAULT_VERSION = VERSION_MAP.get("vuln_mgmt_aws", "") INTEGRATION_NAME = "CNVM AWS" @@ -115,6 +116,10 @@ file_path=cnvm_cloudformation_template, suffix="-orig", ) - download_file(url=template_url, destination=cnvm_cloudformation_template) + get_cnvm_template( + url=template_url, + template_path=cnvm_cloudformation_template, + cnvm_tags=cnfg.aws_config.cnvm_tags, + ) logger.info(f"Installation of {INTEGRATION_NAME} integration is done") diff --git a/deploy/test-environments/fleet_api/src/utils.py b/deploy/test-environments/fleet_api/src/utils.py index 9f552487ad..387598fce1 100644 --- a/deploy/test-environments/fleet_api/src/utils.py +++ b/deploy/test-environments/fleet_api/src/utils.py @@ -266,3 +266,55 @@ def rename_file_by_suffix(file_path: Path, suffix: str) -> None: logger.warning(f"File {file_path.name} not found") except FileExistsError: logger.warning(f"File {new_file_path} already exists") + + +def add_tags(tags: str, yaml_content: str): + """ + Add custom tags to a YAML content while preserving formatting. + + Args: + tags (str): Custom tags in the format "key1=value1 key2=value2 ...". + yaml_content (str): YAML content to which custom tags will be added. + + Returns: + str: The modified YAML content with custom tags. 
+ """ + # Create a ruamel.yaml instance with the ability to preserve formatting + yaml = ruamel.yaml.YAML() + yaml.preserve_quotes = True + yaml.explicit_start = True + yaml.indent(mapping=2, sequence=4, offset=2) + + cnvm_template = yaml.load(yaml_content) + + # Get custom tags from the input argument + custom_tags = tags.split() + tag_dicts = [] + + for tag in custom_tags: + key_values = tag.split(",") + tag_dict = {} + + for key_value in key_values: + key, value = key_value.split("=") + tag_dict[key] = value + tag_dicts.append(tag_dict) + + for resource in cnvm_template["Resources"].values(): + if resource["Type"] == "AWS::EC2::Instance": + if "Properties" not in resource: + resource["Properties"] = {} + if "Tags" not in resource["Properties"]: + resource["Properties"]["Tags"] = [] + resource["Properties"]["Tags"] += tag_dicts + + # Create an output stream + output_stream = ruamel.yaml.compat.StringIO() + + # Dump the modified YAML data to the output stream + yaml.dump(cnvm_template, output_stream) + + # Get the YAML string from the output stream + modified_content = output_stream.getvalue() + + return modified_content From 4c20a96feeb18933da60056c5ef3f2f037b5bfc5 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 15 Oct 2023 18:07:21 +0300 Subject: [PATCH 15/55] update cnvm tag pattern --- .../test-environments/fleet_api/src/install_cnvm_integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index 3771ef6054..13c31a2f05 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -37,7 +37,7 @@ CNVM_CLOUDFORMATION_CONFIG = "../../../cloudformation/config.json" CNMV_TEMPLATE = "../../../cloudformation/elastic-agent-ec2-cnvm.yml" CNMV_TEMP_FILE = 
"elastic-agent-ec2-cnvm-temp.yml" -CNVM_AGENT_TAGS = ["cft_version:CFT_VERSION", "cft_arn:arn:aws:cloudformation:.*"] +CNVM_AGENT_TAGS = ["cft_version:*", "cft_arn:arn:aws:cloudformation:.*"] PKG_DEFAULT_VERSION = VERSION_MAP.get("vuln_mgmt_aws", "") INTEGRATION_NAME = "CNVM AWS" INTEGRATION_INPUT = { From 7d5bf95176c002a42bc1166916bf0cedb0d039ae Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 15 Oct 2023 19:50:07 +0300 Subject: [PATCH 16/55] update workflows --- .github/workflows/test-environment.yml | 2 +- .github/workflows/upgrade-environment.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index 72d683a2fd..50a6e7f4eb 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -344,7 +344,7 @@ jobs: poetry run python src/agents_enrolled.py - name: Run Sanity checks - if: ${{ success() && inputs.run-sanity-tests == 'true' }} + if: ${{ success() && (inputs.run-sanity-tests == 'true' || inputs.run-sanity-tests == true) }} working-directory: ./tests run: | poetry install diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 5c38ef30e3..6d07d4c7dd 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -117,7 +117,7 @@ jobs: - name: Download tf state data env: - S3_BUCKET: ${{ needs.deploy.outputs.deploy-s3-bucket }} + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} run: | aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" From ad41b51d5d204b59fa20ed191d9d2c467be014c9 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 16 Oct 2023 13:31:03 +0300 Subject: [PATCH 17/55] add ec api key support --- .github/workflows/upgrade-environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git 
a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 6d07d4c7dd..b049153fc0 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -26,6 +26,7 @@ env: WORKING_DIR: deploy/test-environments TF_VAR_stack_version: ${{ inputs.elk-stack-version }} TF_VAR_ess_region: gcp-us-west2 + TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }} jobs: init: From be5efe193ebc7442ecf0ab2b25a8a9e7e82c583e Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 16 Oct 2023 16:02:41 +0300 Subject: [PATCH 18/55] update env var for sanity tests --- .github/workflows/upgrade-environment.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index b049153fc0..55c411c9f9 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -172,6 +172,8 @@ jobs: - name: Run Sanity checks if: success() working-directory: ./tests + env: + USE_K8S: false run: | poetry install poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 From 04dd45cbbb37faf332a480e7281d8933c44164d0 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 16 Oct 2023 21:33:38 +0300 Subject: [PATCH 19/55] add upgrade kspm unmanaged agent --- .github/workflows/upgrade-environment.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 55c411c9f9..d1a667ba3d 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -27,6 +27,7 @@ env: TF_VAR_stack_version: ${{ inputs.elk-stack-version }} TF_VAR_ess_region: gcp-us-west2 TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }} + DOCKER_IMAGE: ${{ inputs.docker-image-override }} jobs: init: 
@@ -177,3 +178,22 @@ jobs: run: | poetry install poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + + - name: Set Docker Image version + if: ${{ ! inputs.docker-image-override }} + env: + VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' + run: | + echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV + + - name: Download Integrations data + env: + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" + + - name: Upgrade KSPM Unmanaged agent + run: | + chmod 600 ${{ env.EC2_KSPM_KEY }} + # Update image + ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" From 80d0a3c8bea2031006d831acadebebf6c2447e9c Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 17 Oct 2023 18:08:23 +0300 Subject: [PATCH 20/55] temporary code for testing identity provider --- .github/workflows/upgrade-environment.yml | 278 ++++++++++++---------- 1 file changed, 151 insertions(+), 127 deletions(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index d1a667ba3d..4e4e37a366 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -36,6 +36,23 @@ jobs: stack-version: ${{ steps.set-previous-version.outputs.PREVIOUS_VERSION }} ess-region: ${{ env.TF_VAR_ess_region }} steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::704479110758:role/Developer_eks + aws-region: ${{ env.AWS_REGION }} + + - name: Caller identity + run: | + aws sts get-caller-identity + aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ + --name ${{ 
inputs.deployment_name }} --alias eks-config + kubectl config use-context eks-config + kubectl get po -n kube-system + - name: Set Previous Version id: set-previous-version run: | @@ -70,130 +87,137 @@ jobs: echo "Current Version: $VERSION" echo "Latest Released Version: $PREVIOUS_VERSION" echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" >> $GITHUB_OUTPUT - deploy: - uses: ./.github/workflows/test-environment.yml - needs: init - with: - deployment_name: ${{ inputs.deployment_name }} - elk-stack-version: ${{ needs.init.outputs.stack-version }} - ess-region: ${{ needs.init.outputs.ess-region }} - run-sanity-tests: true - secrets: inherit - upgrade: - runs-on: ubuntu-20.04 - needs: deploy - timeout-minutes: 120 - defaults: - run: - working-directory: ${{ env.WORKING_DIR }} - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - - name: Init Hermit - run: ./bin/hermit env -r >> $GITHUB_ENV - working-directory: ./ - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - - name: Install Poetry - run: | - curl -sSL https://install.python-poetry.org | python3 - - poetry --version - - - name: Install Fleet API dependencies - id: fleet-api-deps - working-directory: ${{ env.WORKING_DIR }}/fleet_api - run: | - poetry install - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ env.AWS_REGION }} - - - name: Download tf state data - env: - S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} - run: | - aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" - - - name: Terraform Init - run: terraform init - - - name: Terraform Validate - run: terraform validate - - - name: Update ELK stack version - id: apply - if: success() - run: | - terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" - - 
- name: Set Environment Output - id: env-output - run: | - echo "KIBANA_URL=$(terraform output -raw kibana_url)" >> $GITHUB_ENV - echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV - echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV - - export ES_PASSWORD=$(terraform output -raw elasticsearch_password) - echo "::add-mask::$ES_PASSWORD" - echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV - - export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) - echo "::add-mask::$EC2_CSPM" - echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV - - export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) - echo "::add-mask::$EC2_KSPM" - echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV - - export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) - echo "::add-mask::$EC2_CSPM_KEY" - echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV - - export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) - echo "::add-mask::$EC2_KSPM_KEY" - echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV - - export KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) - echo "::add-mask::$KSPM_PUBLIC_IP" - echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV - - export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) - echo "::add-mask::$CSPM_PUBLIC_IP" - echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV - - - name: Run Sanity checks - if: success() - working-directory: ./tests - env: - USE_K8S: false - run: | - poetry install - poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 - - - name: Set Docker Image version - if: ${{ ! 
inputs.docker-image-override }} - env: - VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' - run: | - echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV - - - name: Download Integrations data - env: - S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} - run: | - aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" - - - name: Upgrade KSPM Unmanaged agent - run: | - chmod 600 ${{ env.EC2_KSPM_KEY }} - # Update image - ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" + # deploy: + # uses: ./.github/workflows/test-environment.yml + # needs: init + # with: + # deployment_name: ${{ inputs.deployment_name }} + # elk-stack-version: ${{ needs.init.outputs.stack-version }} + # ess-region: ${{ needs.init.outputs.ess-region }} + # run-sanity-tests: true + # secrets: inherit + # upgrade: + # runs-on: ubuntu-20.04 + # needs: deploy + # timeout-minutes: 120 + # defaults: + # run: + # working-directory: ${{ env.WORKING_DIR }} + # steps: + # - name: Check out the repo + # uses: actions/checkout@v4 + + # - name: Init Hermit + # run: ./bin/hermit env -r >> $GITHUB_ENV + # working-directory: ./ + + # - name: Set up Python + # uses: actions/setup-python@v4 + # with: + # python-version: '3.9' + + # - name: Install Poetry + # run: | + # curl -sSL https://install.python-poetry.org | python3 - + # poetry --version + + # - name: Install Fleet API dependencies + # id: fleet-api-deps + # working-directory: ${{ env.WORKING_DIR }}/fleet_api + # run: | + # poetry install + + # - name: Configure AWS credentials + # uses: aws-actions/configure-aws-credentials@v2 + # with: + # aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} + # aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} + # aws-region: ${{ env.AWS_REGION }} + + # - name: Download tf state data + # env: + # S3_BUCKET: ${{ 
needs.deploy.outputs.s3-bucket }} + # run: | + # aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" + + # - name: Terraform Init + # run: terraform init + + # - name: Terraform Validate + # run: terraform validate + + # - name: Update ELK stack version + # id: apply + # if: success() + # run: | + # terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" + + # - name: Set Environment Output + # id: env-output + # run: | + # echo "KIBANA_URL=$(terraform output -raw kibana_url)" >> $GITHUB_ENV + # echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV + # echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV + + # export ES_PASSWORD=$(terraform output -raw elasticsearch_password) + # echo "::add-mask::$ES_PASSWORD" + # echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV + + # export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) + # echo "::add-mask::$EC2_CSPM" + # echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV + + # export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) + # echo "::add-mask::$EC2_KSPM" + # echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV + + # export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) + # echo "::add-mask::$EC2_CSPM_KEY" + # echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV + + # export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) + # echo "::add-mask::$EC2_KSPM_KEY" + # echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV + + # export KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) + # echo "::add-mask::$KSPM_PUBLIC_IP" + # echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV + + # export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) + # echo "::add-mask::$CSPM_PUBLIC_IP" + # echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV + + # - name: Run Sanity checks + # if: success() + # working-directory: ./tests + # env: + # USE_K8S: false + # run: | + # poetry install + # poetry run pytest -m 
"sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + + # - name: Set Docker Image version + # if: ${{ ! inputs.docker-image-override }} + # env: + # VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' + # run: | + # echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV + + # - name: Download Integrations data + # env: + # S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + # run: | + # aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" + + # - name: Upgrade KSPM Unmanaged agent + # run: | + # chmod 600 ${{ env.EC2_KSPM_KEY }} + # # Update image + # ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" + + # - name: Upgrade KSPM EKS agent + # run: | + # aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ + # --name $(terraform output -raw deployment_name) --alias eks-config + # kubectl config use-context eks-config + # kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} From 9c5f4bfdf72061e3e040614a9d94d2d1e7f7a04b Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 17 Oct 2023 18:45:43 +0300 Subject: [PATCH 21/55] add token permissions --- .github/workflows/upgrade-environment.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 4e4e37a366..b7f8f91670 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -35,6 +35,10 @@ jobs: outputs: stack-version: ${{ steps.set-previous-version.outputs.PREVIOUS_VERSION }} ess-region: ${{ env.TF_VAR_ess_region }} + # Add "id-token" with the intended permissions. 
+ permissions: + contents: 'read' + id-token: 'write' steps: - name: Check out the repo uses: actions/checkout@v4 @@ -43,6 +47,7 @@ jobs: uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: arn:aws:iam::704479110758:role/Developer_eks + role-session-name: github-ci aws-region: ${{ env.AWS_REGION }} - name: Caller identity From e6fe3d2f0c441d055b1fcdb2fc2ff4d7529949b5 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 17 Oct 2023 19:30:52 +0300 Subject: [PATCH 22/55] add eks agent upgrade action --- .github/workflows/upgrade-environment.yml | 293 ++++++++++------------ 1 file changed, 137 insertions(+), 156 deletions(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index b7f8f91670..e14e430b47 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -35,29 +35,7 @@ jobs: outputs: stack-version: ${{ steps.set-previous-version.outputs.PREVIOUS_VERSION }} ess-region: ${{ env.TF_VAR_ess_region }} - # Add "id-token" with the intended permissions. 
- permissions: - contents: 'read' - id-token: 'write' steps: - - name: Check out the repo - uses: actions/checkout@v4 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - role-to-assume: arn:aws:iam::704479110758:role/Developer_eks - role-session-name: github-ci - aws-region: ${{ env.AWS_REGION }} - - - name: Caller identity - run: | - aws sts get-caller-identity - aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ - --name ${{ inputs.deployment_name }} --alias eks-config - kubectl config use-context eks-config - kubectl get po -n kube-system - - name: Set Previous Version id: set-previous-version run: | @@ -92,137 +70,140 @@ jobs: echo "Current Version: $VERSION" echo "Latest Released Version: $PREVIOUS_VERSION" echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" >> $GITHUB_OUTPUT - # deploy: - # uses: ./.github/workflows/test-environment.yml - # needs: init - # with: - # deployment_name: ${{ inputs.deployment_name }} - # elk-stack-version: ${{ needs.init.outputs.stack-version }} - # ess-region: ${{ needs.init.outputs.ess-region }} - # run-sanity-tests: true - # secrets: inherit - # upgrade: - # runs-on: ubuntu-20.04 - # needs: deploy - # timeout-minutes: 120 - # defaults: - # run: - # working-directory: ${{ env.WORKING_DIR }} - # steps: - # - name: Check out the repo - # uses: actions/checkout@v4 - - # - name: Init Hermit - # run: ./bin/hermit env -r >> $GITHUB_ENV - # working-directory: ./ - - # - name: Set up Python - # uses: actions/setup-python@v4 - # with: - # python-version: '3.9' - - # - name: Install Poetry - # run: | - # curl -sSL https://install.python-poetry.org | python3 - - # poetry --version - - # - name: Install Fleet API dependencies - # id: fleet-api-deps - # working-directory: ${{ env.WORKING_DIR }}/fleet_api - # run: | - # poetry install - - # - name: Configure AWS credentials - # uses: aws-actions/configure-aws-credentials@v2 - # with: - # aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - # 
aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - # aws-region: ${{ env.AWS_REGION }} - - # - name: Download tf state data - # env: - # S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} - # run: | - # aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" - - # - name: Terraform Init - # run: terraform init - - # - name: Terraform Validate - # run: terraform validate - - # - name: Update ELK stack version - # id: apply - # if: success() - # run: | - # terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" - - # - name: Set Environment Output - # id: env-output - # run: | - # echo "KIBANA_URL=$(terraform output -raw kibana_url)" >> $GITHUB_ENV - # echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV - # echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV - - # export ES_PASSWORD=$(terraform output -raw elasticsearch_password) - # echo "::add-mask::$ES_PASSWORD" - # echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV - - # export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) - # echo "::add-mask::$EC2_CSPM" - # echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV - - # export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) - # echo "::add-mask::$EC2_KSPM" - # echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV - - # export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) - # echo "::add-mask::$EC2_CSPM_KEY" - # echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV - - # export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) - # echo "::add-mask::$EC2_KSPM_KEY" - # echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV - - # export KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) - # echo "::add-mask::$KSPM_PUBLIC_IP" - # echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV - - # export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) - # echo "::add-mask::$CSPM_PUBLIC_IP" - # echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV - - # - 
name: Run Sanity checks - # if: success() - # working-directory: ./tests - # env: - # USE_K8S: false - # run: | - # poetry install - # poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 - - # - name: Set Docker Image version - # if: ${{ ! inputs.docker-image-override }} - # env: - # VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' - # run: | - # echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV - - # - name: Download Integrations data - # env: - # S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} - # run: | - # aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" - - # - name: Upgrade KSPM Unmanaged agent - # run: | - # chmod 600 ${{ env.EC2_KSPM_KEY }} - # # Update image - # ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" - - # - name: Upgrade KSPM EKS agent - # run: | - # aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ - # --name $(terraform output -raw deployment_name) --alias eks-config - # kubectl config use-context eks-config - # kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} + deploy: + uses: ./.github/workflows/test-environment.yml + needs: init + with: + deployment_name: ${{ inputs.deployment_name }} + elk-stack-version: ${{ needs.init.outputs.stack-version }} + ess-region: ${{ needs.init.outputs.ess-region }} + run-sanity-tests: true + secrets: inherit + upgrade: + runs-on: ubuntu-20.04 + needs: deploy + timeout-minutes: 120 + defaults: + run: + working-directory: ${{ env.WORKING_DIR }} + permissions: + contents: 'read' + id-token: 'write' + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Init Hermit + run: ./bin/hermit env -r >> $GITHUB_ENV + working-directory: ./ + + - name: Set up Python + uses: actions/setup-python@v4 + with: + 
python-version: '3.9' + + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + poetry --version + + - name: Install Fleet API dependencies + id: fleet-api-deps + working-directory: ${{ env.WORKING_DIR }}/fleet_api + run: | + poetry install + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::704479110758:role/Developer_eks + role-session-name: github-ci + aws-region: ${{ env.AWS_REGION }} + + - name: Download tf state data + env: + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/terraform.tfstate" "./terraform.tfstate" + + - name: Terraform Init + run: terraform init + + - name: Terraform Validate + run: terraform validate + + - name: Update ELK stack version + id: apply + if: success() + run: | + terraform apply --auto-approve -var="deployment_name=${{ inputs.deployment_name }}" -var="region=${{ env.AWS_REGION }}" + + - name: Set Environment Output + id: env-output + run: | + echo "KIBANA_URL=$(terraform output -raw kibana_url)" >> $GITHUB_ENV + echo "ES_URL=$(terraform output -raw elasticsearch_url)" >> $GITHUB_ENV + echo "ES_USER=$(terraform output -raw elasticsearch_username)" >> $GITHUB_ENV + + export ES_PASSWORD=$(terraform output -raw elasticsearch_password) + echo "::add-mask::$ES_PASSWORD" + echo "ES_PASSWORD=$ES_PASSWORD" >> $GITHUB_ENV + + export EC2_CSPM=$(terraform output -raw ec2_cspm_ssh_cmd) + echo "::add-mask::$EC2_CSPM" + echo "EC2_CSPM=$EC2_CSPM" >> $GITHUB_ENV + + export EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd) + echo "::add-mask::$EC2_KSPM" + echo "EC2_KSPM=$EC2_KSPM" >> $GITHUB_ENV + + export EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key) + echo "::add-mask::$EC2_CSPM_KEY" + echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >> $GITHUB_ENV + + export EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key) + echo "::add-mask::$EC2_KSPM_KEY" + echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >> $GITHUB_ENV + + 
export KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip) + echo "::add-mask::$KSPM_PUBLIC_IP" + echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >> $GITHUB_ENV + + export CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip) + echo "::add-mask::$CSPM_PUBLIC_IP" + echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >> $GITHUB_ENV + + - name: Run Sanity checks + if: success() + working-directory: ./tests + env: + USE_K8S: false + run: | + poetry install + poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + + - name: Set Docker Image version + if: ${{ ! inputs.docker-image-override }} + env: + VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' + run: | + echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV + + - name: Download Integrations data + env: + S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} + run: | + aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" + + - name: Upgrade KSPM Unmanaged agent + run: | + chmod 600 ${{ env.EC2_KSPM_KEY }} + # Update image + ssh -o StrictHostKeyChecking=no -v -i ${{ env.EC2_KSPM_KEY }} "ubuntu@${{ env.KSPM_PUBLIC_IP }}" "kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }}" + + - name: Upgrade KSPM EKS agent + run: | + aws eks --region ${{ env.AWS_REGION }} update-kubeconfig \ + --name $(terraform output -raw deployment_name) --alias eks-config + kubectl config use-context eks-config + kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} From b69a76381adf71340195ca5c958967fe1f39444b Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 18 Oct 2023 18:53:25 +0300 Subject: [PATCH 23/55] add cnvm agent upgrade --- .github/workflows/test-environment.yml | 6 + .github/workflows/upgrade-environment.yml | 9 ++ .../fleet_api/pyproject.toml | 1 + .../fleet_api/src/configuration_fleet.py | 1 + 
.../fleet_api/src/upgrade_cnvm.py | 122 ++++++++++++++++++ 5 files changed, 139 insertions(+) create mode 100755 deploy/test-environments/fleet_api/src/upgrade_cnvm.py diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index 50a6e7f4eb..fa0113d7e7 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -72,6 +72,9 @@ on: s3-bucket: description: "Terraform state s3 bucket folder" value: ${{ jobs.Deploy.outputs.deploy-s3-bucket }} + cnvm-stack-name: + description: "AWS CNVM integration stack name" + value: ${{ jobs.Deploy.outputs.aws-cnvm-stack-name }} env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} @@ -104,6 +107,7 @@ jobs: id-token: 'write' outputs: deploy-s3-bucket: ${{ steps.upload-state.outputs.s3-bucket-folder }} + aws-cnvm-stack-name: ${{ steps.upload-state.outputs.aws-cnvm-stack }} steps: - name: Check out the repo uses: actions/checkout@v4 @@ -224,7 +228,9 @@ jobs: aws s3 cp "./terraform.tfstate" "${{ env.S3_BUCKET }}/terraform.tfstate" aws s3 cp "${{ env.EC2_CSPM_KEY }}" "${{ env.S3_BUCKET }}/cspm.pem" aws s3 cp "${{ env.EC2_KSPM_KEY }}" "${{ env.S3_BUCKET }}/kspm.pem" + aws s3 cp "../cloudformation/config.json" "${{ env.S3_BUCKET }}/cnvm_config.json" echo "s3-bucket-folder=${{ env.S3_BUCKET }}" >> $GITHUB_OUTPUT + echo "aws-cnvm-stack=${{ env.CNVM_STACK_NAME }}" >> $GITHUB_OUTPUT - name: Summary if: success() diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index e14e430b47..b0bf3ece8f 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -194,6 +194,7 @@ jobs: S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} run: | aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" + aws s3 cp "${{ env.S3_BUCKET }}/cnvm_config.json" "${{ env.WORKING_DIR }}/fleet_api/src/cnvm_config.json" - name: Upgrade KSPM Unmanaged agent run: | @@ -207,3 +208,11 @@ jobs: 
--name $(terraform output -raw deployment_name) --alias eks-config kubectl config use-context eks-config kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} + + - name: Upgrade AWS CNMV agent + working-directory: ${{ env.WORKING_DIR }}/fleet_api + env: + CNVM_STACK_NAME: ${{ needs.deploy.outputs.cnvm-stack-name }} + STACK_VERSION: ${{ inputs.elk-stack-version }} + run: | + poetry run python src/upgrade_cnvm.py diff --git a/deploy/test-environments/fleet_api/pyproject.toml b/deploy/test-environments/fleet_api/pyproject.toml index 43c6c06397..64a172c060 100644 --- a/deploy/test-environments/fleet_api/pyproject.toml +++ b/deploy/test-environments/fleet_api/pyproject.toml @@ -13,6 +13,7 @@ loguru = "^0.7.0" jinja2 = "^3.1.2" ruamel-yaml = "^0.17.31" packaging = "^23.1" +boto3 = "^1.28.65" [build-system] diff --git a/deploy/test-environments/fleet_api/src/configuration_fleet.py b/deploy/test-environments/fleet_api/src/configuration_fleet.py index eec382d1bb..fff2249669 100644 --- a/deploy/test-environments/fleet_api/src/configuration_fleet.py +++ b/deploy/test-environments/fleet_api/src/configuration_fleet.py @@ -40,6 +40,7 @@ aws_config.access_key_id = os.getenv("AWS_ACCESS_KEY_ID", "NA") aws_config.secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY", "NA") aws_config.cnvm_tags = os.getenv("AWS_CNVM_TAGS", CNVM_TAGS) +aws_config.cnvm_stack_name = os.getenv("CNVM_STACK_NAME", "NA") gcp_dm_config = Munch() gcp_dm_config.deployment_name = os.getenv("DEPLOYMENT_NAME", "") diff --git a/deploy/test-environments/fleet_api/src/upgrade_cnvm.py b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py new file mode 100755 index 0000000000..9cf63bb901 --- /dev/null +++ b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +""" +This script updates AWS CNVM agent. + +The following steps are performed: +1. Download the latest CNVM template. +2. Get all the required parameters. +3. 
Execute a CloudFormation stack update.
+
+Note: This script requires the configuration and dependencies provided by the 'cnfg' and 'utils' modules.
+
+For execution, you can create a configuration file 'cnvm_config.json' in the same directory.
+
+Example 'cnvm_config.json':
+{
+    "ENROLLMENT_TOKEN": "YourEnrollmentToken"
+}
+
+Ensure that AWS credentials are properly configured for Boto3.
+
+You can also modify the 'stack_tags' variable to set custom tags for the CloudFormation stack.
+
+"""
+from pathlib import Path
+import boto3
+from munch import Munch
+from loguru import logger
+from utils import read_json
+import configuration_fleet as cnfg
+from api.common_api import (
+    get_artifact_server,
+    get_fleet_server_host,
+)
+from package_policy import (
+    get_package_default_url,
+    extract_template_url,
+)
+
+
+CNVM_JSON_PATH = Path(__file__).parent / "cnvm_config.json"
+
+
+def update_cloudformation_stack(cfg: Munch):
+    """
+    Update an AWS CloudFormation stack with the provided configuration.
+
+    Args:
+        cfg (Munch): A configuration object containing the following attributes:
+            - stack_name (str): The name of the CloudFormation stack to update.
+            - template (str): The URL or S3 path to the CloudFormation template.
+            - elastic_agent_version (str): The Elastic Agent version to set as a parameter.
+            - elastic_artifact_server (str): The Elastic Artifact Server URL to set as a parameter.
+            - enrollment_token (str): The Enrollment Token to set as a parameter.
+            - fleet_url (str): The Fleet URL to set as a parameter.
+            - stack_tags (list of dict): Tags to apply to the CloudFormation stack.
+
+    Returns:
+        None
+
+    The function performs a CloudFormation stack update using the provided configuration.
+    It initiates the stack update, waits for the update to complete, and logs the status. 
+ """ + # Create a Boto3 CloudFormation client + cf_client = boto3.client("cloudformation") + + # Parameters in the format ParameterKey=Key,ParameterValue=Value + parameters = [ + {"ParameterKey": "ElasticAgentVersion", "ParameterValue": cfg.elastic_agent_version}, + {"ParameterKey": "ElasticArtifactServer", "ParameterValue": cfg.elastic_artifact_server}, + {"ParameterKey": "EnrollmentToken", "ParameterValue": cfg.enrollment_token}, + {"ParameterKey": "FleetUrl", "ParameterValue": cfg.fleet_url}, + ] + + # Capabilities + capabilities = ["CAPABILITY_NAMED_IAM"] + + # Perform the stack update with the YAML template body + response = cf_client.update_stack( + StackName=cfg.stack_name, + TemplateURL=cfg.template, + Parameters=parameters, + Capabilities=capabilities, + Tags=cfg.stack_tags, + ) + logger.info(f"Stack {response.get('StackId', 'NA')} update initiated. Waiting for update to complete...") + + # Wait until the stack update is complete + cf_client.get_waiter("stack_update_complete").wait(StackName=cfg.stack_name) + + logger.info(f"Stack {cfg.stack_name} update is complete.") + + +if __name__ == "__main__": + config = Munch() + config.stack_name = cnfg.aws_config.cnvm_stack_name + # Get template + logger.info("Get AWS CNVM template") + default_url = get_package_default_url( + cfg=cnfg.elk_config, + policy_name="vuln_mgmt", + policy_type="cloudbeat/vuln_mgmt_aws", + ) + template_url = extract_template_url(url_string=default_url) + + config.template = template_url + config.elastic_agent_version = cnfg.elk_config.stack_version + config.elastic_artifact_server = get_artifact_server(cnfg.elk_config.stack_version) + + # Tags for the CloudFormation stack + stack_tags = [ + {"Key": "division", "Value": "engineering"}, + {"Key": "org", "Value": "security"}, + {"Key": "team", "Value": "cloud-security-posture"}, + {"Key": "project", "Value": "test-environments"}, + ] + config.stack_tags = stack_tags + + # Get enrollment token + cnvm_json = read_json(CNVM_JSON_PATH) + 
config.enrollment_token = cnvm_json.get("ENROLLMENT_TOKEN", "") + config.fleet_url = get_fleet_server_host(cfg=cnfg.elk_config) + update_cloudformation_stack(cfg=config) From 0c53c5ae729616d1e1032275fa7877cbfb69160b Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 18 Oct 2023 19:35:38 +0300 Subject: [PATCH 24/55] update file path --- .github/workflows/test-environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index fa0113d7e7..ac8412eedb 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -228,7 +228,6 @@ jobs: aws s3 cp "./terraform.tfstate" "${{ env.S3_BUCKET }}/terraform.tfstate" aws s3 cp "${{ env.EC2_CSPM_KEY }}" "${{ env.S3_BUCKET }}/cspm.pem" aws s3 cp "${{ env.EC2_KSPM_KEY }}" "${{ env.S3_BUCKET }}/kspm.pem" - aws s3 cp "../cloudformation/config.json" "${{ env.S3_BUCKET }}/cnvm_config.json" echo "s3-bucket-folder=${{ env.S3_BUCKET }}" >> $GITHUB_OUTPUT echo "aws-cnvm-stack=${{ env.CNVM_STACK_NAME }}" >> $GITHUB_OUTPUT @@ -342,6 +341,7 @@ jobs: aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_d4c.yaml" "${{ env.S3_BUCKET }}/kspm_d4c.yaml" aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_eks.yaml" "${{ env.S3_BUCKET }}/kspm_eks.yaml" aws s3 cp "${{ env.FLEET_API_DIR}}/cspm-linux.sh" "${{ env.S3_BUCKET }}/cspm-linux.sh" + aws s3 cp "../cloudformation/config.json" "${{ env.S3_BUCKET }}/cnvm_config.json" - name: Wait for agents to enroll id: wait-for-agents From 602a4a82d15a8fe7f7894562ed54f698e43f20b6 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 19 Oct 2023 16:42:21 +0300 Subject: [PATCH 25/55] update file path --- .github/workflows/upgrade-environment.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/upgrade-environment.yml 
b/.github/workflows/upgrade-environment.yml index b0bf3ece8f..c709c0e44d 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -24,6 +24,7 @@ env: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_REGION: "eu-west-1" WORKING_DIR: deploy/test-environments + FLEET_API_DIR: fleet_api/src TF_VAR_stack_version: ${{ inputs.elk-stack-version }} TF_VAR_ess_region: gcp-us-west2 TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }} @@ -194,7 +195,7 @@ jobs: S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} run: | aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" - aws s3 cp "${{ env.S3_BUCKET }}/cnvm_config.json" "${{ env.WORKING_DIR }}/fleet_api/src/cnvm_config.json" + aws s3 cp "${{ env.S3_BUCKET }}/cnvm_config.json" "${{ env.FLEET_API_DIR }}/cnvm_config.json" - name: Upgrade KSPM Unmanaged agent run: | @@ -210,9 +211,9 @@ jobs: kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} - name: Upgrade AWS CNMV agent - working-directory: ${{ env.WORKING_DIR }}/fleet_api + working-directory: ${{ env.WORKING_DIR }}/${{ env.FLEET_API_DIR }} env: CNVM_STACK_NAME: ${{ needs.deploy.outputs.cnvm-stack-name }} STACK_VERSION: ${{ inputs.elk-stack-version }} run: | - poetry run python src/upgrade_cnvm.py + poetry run python upgrade_cnvm.py From e8a35afc907ec987a9fb9dc68c99e4fe2e2c2bc4 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:27:50 +0300 Subject: [PATCH 26/55] add sanity checks after upgrade --- .github/workflows/upgrade-environment.yml | 9 +++++++++ tests/configuration.py | 1 + tests/integration/tests/test_sanity_checks.py | 20 +++++++++++++++++-- tests/pyproject.toml | 1 + 4 files changed, 29 insertions(+), 2 deletions(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index c709c0e44d..d8b711d9b2 100644 --- 
a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -217,3 +217,12 @@ jobs: STACK_VERSION: ${{ inputs.elk-stack-version }} run: | poetry run python upgrade_cnvm.py + + - name: Run Upgrade Sanity checks + if: success() + working-directory: ./tests + env: + STACK_VERSION: ${{ inputs.elk-stack-version }} + run: | + poetry install + poetry run pytest -m "upgrade" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 diff --git a/tests/configuration.py b/tests/configuration.py index c98d940d32..246bfae228 100644 --- a/tests/configuration.py +++ b/tests/configuration.py @@ -65,6 +65,7 @@ elasticsearch.kspm_index = os.getenv("KSPM_INDEX", FINDINGS_INDEX_PATTERN) elasticsearch.cspm_index = os.getenv("CSPM_INDEX", FINDINGS_INDEX_PATTERN) elasticsearch.cnvm_index = os.getenv("CNVM_INDEX", VULNERABILITIES_INDEX_PATTERN) +elasticsearch.stack_version = os.getenv("STACK_VERSION", "") # --- Docker environment definition docker = Munch() diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index e970534d1a..360b2ba399 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -8,11 +8,17 @@ """ import pytest from commonlib.utils import get_findings +from configuration import elasticsearch CONFIG_TIMEOUT = 120 GCP_CONFIG_TIMEOUT = 600 CNVM_CONFIG_TIMEOUT = 3600 +STACK_VERSION = elasticsearch.stack_version +# Check if STACK_VERSION is provided +if not STACK_VERSION: + raise ValueError("STACK_VERSION is not provided. Please set the STACK_VERSION in the configuration.") + tests_data = { "cis_aws": [ "cloud-compute", @@ -40,6 +46,7 @@ @pytest.mark.sanity +@pytest.mark.upgrade @pytest.mark.parametrize("match_type", tests_data["cis_k8s"]) def test_kspm_unmanaged_findings(kspm_client, match_type): """ @@ -55,7 +62,11 @@ def test_kspm_unmanaged_findings(kspm_client, match_type): Raises: AssertionError: If the resource type is missing. 
""" - query_list = [{"term": {"rule.benchmark.id": "cis_k8s"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_k8s"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = kspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-4h") result = get_findings(kspm_client, CONFIG_TIMEOUT, query, sort, match_type) @@ -63,6 +74,7 @@ def test_kspm_unmanaged_findings(kspm_client, match_type): @pytest.mark.sanity +@pytest.mark.upgrade @pytest.mark.parametrize("match_type", tests_data["cis_eks"]) def test_kspm_e_k_s_findings(kspm_client, match_type): """ @@ -78,7 +90,11 @@ def test_kspm_e_k_s_findings(kspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_eks"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_eks"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = kspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-4h") results = get_findings(kspm_client, CONFIG_TIMEOUT, query, sort, match_type) diff --git a/tests/pyproject.toml b/tests/pyproject.toml index f2b20075ec..a1a1be37dd 100644 --- a/tests/pyproject.toml +++ b/tests/pyproject.toml @@ -31,6 +31,7 @@ markers = [ "pre_merge", "pre_merge_agent", "sanity", + "upgrade", # test target markers "file_system_rules", "k8s_object_rules", From c043e0a9a64eba6bfe91b3ea514acef9cf188e4a Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 25 Oct 2023 18:24:47 +0300 Subject: [PATCH 27/55] update jobs dependency and add prev stack version --- .github/workflows/upgrade-environment.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml 
index d8b711d9b2..ff61719715 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -82,7 +82,7 @@ jobs: secrets: inherit upgrade: runs-on: ubuntu-20.04 - needs: deploy + needs: [init, deploy] timeout-minutes: 120 defaults: run: @@ -179,6 +179,7 @@ jobs: working-directory: ./tests env: USE_K8S: false + STACK_VERSION: ${{ needs.init.outputs.stack-version }} run: | poetry install poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 @@ -223,6 +224,7 @@ jobs: working-directory: ./tests env: STACK_VERSION: ${{ inputs.elk-stack-version }} + USE_K8S: false run: | poetry install poetry run pytest -m "upgrade" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 From ce53d90beccfec802a5ec650f32500ed0ae59c60 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 26 Oct 2023 23:37:02 +0300 Subject: [PATCH 28/55] add upgrade all linux agents --- .github/workflows/test-environment.yml | 2 +- .github/workflows/upgrade-environment.yml | 8 +- .../fleet_api/src/api/agent_policy_api.py | 73 ++++++++++ .../fleet_api/src/api/common_api.py | 136 +++++++++++++++++- .../fleet_api/src/install_cnvm_integration.py | 4 +- .../src/install_cspm_gcp_integration.py | 9 +- .../fleet_api/src/install_cspm_integration.py | 4 +- .../fleet_api/src/install_d4c_integration.py | 5 +- .../src/install_kspm_eks_integration.py | 4 +- .../src/install_kspm_unmanaged_integration.py | 4 +- .../fleet_api/src/package_policy.py | 2 + .../fleet_api/src/state_file_manager.py | 32 ++++- .../fleet_api/src/upgrade_agents.py | 115 +++++++++++++++ tests/integration/tests/test_sanity_checks.py | 16 ++- tests/pyproject.toml | 1 - 15 files changed, 390 insertions(+), 25 deletions(-) create mode 100755 deploy/test-environments/fleet_api/src/upgrade_agents.py diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index ac8412eedb..dbc6b0a4e0 
100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -341,7 +341,7 @@ jobs: aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_d4c.yaml" "${{ env.S3_BUCKET }}/kspm_d4c.yaml" aws s3 cp "${{ env.FLEET_API_DIR}}/kspm_eks.yaml" "${{ env.S3_BUCKET }}/kspm_eks.yaml" aws s3 cp "${{ env.FLEET_API_DIR}}/cspm-linux.sh" "${{ env.S3_BUCKET }}/cspm-linux.sh" - aws s3 cp "../cloudformation/config.json" "${{ env.S3_BUCKET }}/cnvm_config.json" + aws s3 cp "${{ env.FLEET_API_DIR}}/state_data.json" "${{ env.S3_BUCKET }}/state_data.json" - name: Wait for agents to enroll id: wait-for-agents diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index ff61719715..31dc40cfc1 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -196,7 +196,7 @@ jobs: S3_BUCKET: ${{ needs.deploy.outputs.s3-bucket }} run: | aws s3 cp "${{ env.S3_BUCKET }}/kspm.pem" "${{ env.EC2_KSPM_KEY }}" - aws s3 cp "${{ env.S3_BUCKET }}/cnvm_config.json" "${{ env.FLEET_API_DIR }}/cnvm_config.json" + aws s3 cp "${{ env.S3_BUCKET }}/state_data.json" "${{ env.FLEET_API_DIR }}/state_data.json" - name: Upgrade KSPM Unmanaged agent run: | @@ -211,13 +211,13 @@ jobs: kubectl config use-context eks-config kubectl set image daemonset elastic-agent -n kube-system elastic-agent=${{ env.DOCKER_IMAGE }} - - name: Upgrade AWS CNMV agent + - name: Upgrade Linux agents working-directory: ${{ env.WORKING_DIR }}/${{ env.FLEET_API_DIR }} env: CNVM_STACK_NAME: ${{ needs.deploy.outputs.cnvm-stack-name }} STACK_VERSION: ${{ inputs.elk-stack-version }} run: | - poetry run python upgrade_cnvm.py + poetry run python upgrade_agents.py - name: Run Upgrade Sanity checks if: success() @@ -227,4 +227,4 @@ jobs: USE_K8S: false run: | poetry install - poetry run pytest -m "upgrade" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 + poetry run pytest -m "sanity" --alluredir=./allure/results/ --clean-alluredir 
--maxfail=4 diff --git a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py index 6c3a4cd00d..b4d7797cec 100644 --- a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py +++ b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py @@ -2,6 +2,7 @@ This module contains API calls related to the agent policy API. """ +from typing import Optional from munch import Munch, munchify from loguru import logger from api.base_call_api import APICallException, perform_api_call @@ -40,6 +41,35 @@ def create_agent_policy(cfg: Munch, json_policy: dict) -> str: raise api_ex +def update_agent_policy(cfg: Munch, policy_id, json_policy: dict): + """This function updates an agent policy + + Args: + cfg (Munch): Config object containing authentication data. + policy_id (str): Policy id to be updated. + json_policy (dict): Data for the agent policy to be updated. + + Raises: + APICallException: If the API call fails or returns a non-200 status code. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agent_policies/{policy_id}" + + try: + perform_api_call( + method="PUT", + url=url, + auth=cfg.auth, + params={"json": json_policy}, + ) + logger.info(f"Agent policy '{policy_id}' has been updated") + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + raise api_ex + + def delete_agent_policy(cfg: Munch, agent_policy_id: str): """This function deletes an agent policy @@ -164,3 +194,46 @@ def unenroll_agents_from_policy(cfg: Munch, agents: list): f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", ) return + + +def create_agent_download_source( + cfg: Munch, + name: str, + host: str, + is_default: bool = False, +) -> Optional[str]: + """ + Create a new agent download source using the Kibana Fleet API. 
+ + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + name (str): The name of the agent download source. + host (str): The host URL where agents will download packages from. + is_default (bool, optional): Whether this source should be the default. Default is False. + + Returns: + str: The ID of the newly created agent download source, + or None if the ID cannot be retrieved. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agent_download_sources" + json_data = { + "name": name, + "host": host, + "is_default": is_default, + } + + try: + response = perform_api_call( + method="POST", + url=url, + auth=cfg.auth, + params={"json": json_data}, + ) + source_id = response.get("item", {}).get("id") + return source_id + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + return None diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py index a74b031280..bc13d926c7 100644 --- a/deploy/test-environments/fleet_api/src/api/common_api.py +++ b/deploy/test-environments/fleet_api/src/api/common_api.py @@ -1,8 +1,9 @@ """ This module contains API calls related to Fleet settings """ +import time import codecs -from typing import Dict, Any +from typing import Dict, Any, List from munch import Munch, munchify from loguru import logger from api.base_call_api import APICallException, perform_api_call @@ -13,6 +14,7 @@ ) AGENT_ARTIFACT_SUFFIX = "/downloads/beats/elastic-agent" +AGENT_ARTIFACT_SUFFIX_SHORT = "/downloads/" STAGING_ARTIFACTORY_URL = "https://staging.elastic.co/" SNAPSHOT_ARTIFACTORY_URL = "https://snapshots.elastic.co/" @@ -188,26 +190,31 @@ def get_build_info(version: str) -> str: return "" -def get_artifact_server(version: str) -> str: +def get_artifact_server(version: str, is_short_url: bool = False) -> str: """ - Retrieve the 
artifact server for a specific version of Elastic. + Retrieve the artifact server URL for a specific version of Elastic. Args: - version (str): The version of Elastic. + elastic_version (str): The version of Elastic. + is_snapshot_url (bool, optional): Indicates whether to use the short artifact URL. + Defaults to False. Returns: - str: The artifact server of the specified version. + str: The artifact server URL for the specified Elastic version. Raises: APICallException: If the API call to retrieve the artifact server fails. """ - if is_snapshot(version): url = SNAPSHOT_ARTIFACTORY_URL else: url = STAGING_ARTIFACTORY_URL - return url + get_build_info(version) + AGENT_ARTIFACT_SUFFIX + artifacts_suffix = AGENT_ARTIFACT_SUFFIX + if is_short_url: + artifacts_suffix = AGENT_ARTIFACT_SUFFIX_SHORT + + return url + get_build_info(version) + artifacts_suffix def is_snapshot(version: str) -> bool: @@ -368,3 +375,118 @@ def update_package_version(cfg: Munch, package_name: str, package_version: str): logger.error( f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", ) + + +def bulk_upgrade_agents(cfg: Munch, agent_ids: List[str], version: str, source_uri: str) -> str: + """ + Upgrade a list of agents to a specified version using the Kibana API. + + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + agent_ids (List[str]): List of agent IDs to upgrade. + version (str): The version to upgrade to. + source_uri (str): The source URI for the agent package. + + Returns: + str: The action ID of the upgrade. + + Raises: + APICallException: If the API call fails with a non-200 status code. 
+ """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/agents/bulk_upgrade" + json_data = { + "agents": agent_ids, + "version": version, + "source_uri": source_uri, + } + + try: + response = perform_api_call( + method="POST", + url=url, + auth=cfg.auth, + params={"json": json_data}, + ) + action_id = response.get("actionId") + if not action_id: + raise APICallException( + response.status_code, + "API response did not include an actionId", + ) + logger.info(f"Agents '{agent_ids}' upgrade to version '{version}' is started") + return action_id + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + raise APICallException(api_ex.status_code, api_ex.response_text) from api_ex + + +def get_action_status(cfg: Munch) -> List[dict]: + """ + Retrieve action status for agents using the Kibana API. + + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + + Returns: + List[dict]: A list of action status items. + + Raises: + APICallException: If the API call fails with a non-200 status code. + """ + url = f"{cfg.kibana_url}/api/fleet/agents/action_status" + + try: + response = perform_api_call( + method="GET", + url=url, + auth=cfg.auth, + ) + return response.get("items", []) + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", + ) + raise APICallException(api_ex.status_code, api_ex.response_text) from api_ex + + +def wait_for_action_status( + cfg: Munch, + target_action_id: str, + target_type: str, + target_status: str, + timeout_secs: int = 600, +): + """ + Wait for a specific action status to match the target criteria. + + Args: + cfg (Munch): Configuration object containing Kibana URL and authentication details. + target_action_id (str): The action ID to match. + target_type (str): The target action type to match. 
+ target_status (str): The target status to match. + timeout_secs (int): Maximum time to wait in seconds (default is 600 seconds). + + Returns: + bool: True if the target criteria is met, False if the timeout is reached. + + Raises: + APICallException: If the API call fails with a non-200 status code. + """ + start_time = time.time() + while True: + action_status = get_action_status(cfg) + for item in action_status: + if ( + item.get("actionId") == target_action_id + and item.get("type") == target_type + and item.get("status") == target_status + ): + return True # Found the target criteria + + if time.time() - start_time >= timeout_secs: + return False # Timeout reached + + time.sleep(1) # Fixed sleep interval of 1 second diff --git a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py index 13c31a2f05..036afcaa22 100644 --- a/deploy/test-environments/fleet_api/src/install_cnvm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cnvm_integration.py @@ -22,7 +22,7 @@ get_cnvm_template, ) from loguru import logger -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType from package_policy import ( version_compatible, VERSION_MAP, @@ -86,6 +86,8 @@ package_policy_id, CNVM_EXPECTED_AGENTS, CNVM_AGENT_TAGS, + HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], ), ) diff --git a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py index eb0a30c6dd..a5be411cf5 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_gcp_integration.py @@ -11,6 +11,7 @@ import json from pathlib import Path from munch import Munch +from packaging import version import configuration_fleet as cnfg from api.agent_policy_api import 
create_agent_policy from api.package_policy_api import create_cspm_integration @@ -22,7 +23,7 @@ update_package_version, ) from loguru import logger -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType from package_policy import ( load_data, version_compatible, @@ -63,6 +64,10 @@ package_name="cloud_security_posture", package_version=package_version, ) + if version.parse(package_version) >= version.parse("1.6"): + INTEGRATION_INPUT["vars"] = { + "gcp.account_type": "single-account", + } logger.info(f"Starting installation of {INTEGRATION_NAME} integration.") agent_data, package_data = load_data( cfg=cnfg.elk_config, @@ -87,6 +92,8 @@ package_policy_id, CSPM_GCP_EXPECTED_AGENTS, [], + HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], ), ) diff --git a/deploy/test-environments/fleet_api/src/install_cspm_integration.py b/deploy/test-environments/fleet_api/src/install_cspm_integration.py index e9f82d832f..4f8a29b3cc 100755 --- a/deploy/test-environments/fleet_api/src/install_cspm_integration.py +++ b/deploy/test-environments/fleet_api/src/install_cspm_integration.py @@ -22,7 +22,7 @@ ) from loguru import logger from utils import render_template -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType from package_policy import ( load_data, version_compatible, @@ -97,6 +97,8 @@ package_policy_id, CSPM_EXPECTED_AGENTS, [], + HostType.LINUX_TAR.value, + INTEGRATION_INPUT["name"], ), ) diff --git a/deploy/test-environments/fleet_api/src/install_d4c_integration.py b/deploy/test-environments/fleet_api/src/install_d4c_integration.py index e97c4186c7..d4c62d6248 100755 --- a/deploy/test-environments/fleet_api/src/install_d4c_integration.py +++ b/deploy/test-environments/fleet_api/src/install_d4c_integration.py @@ -23,10 +23,11 @@ ) from loguru import logger from utils import read_json -from state_file_manager import 
state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType D4C_AGENT_POLICY = "../../../cloud/data/agent_policy_d4c.json" D4C_PACKAGE_POLICY = "../../../cloud/data/package_policy_d4c.json" +D4C_AGENT_POLICY_NAME = "tf-ap-d4c" D4C_EXPECTED_AGENTS = 2 INTEGRATAION_NAME = "D4C" @@ -78,6 +79,8 @@ def load_data() -> Tuple[Dict, Dict]: package_policy_id, D4C_EXPECTED_AGENTS, [], + HostType.KUBERNETES.value, + D4C_AGENT_POLICY_NAME, ), ) diff --git a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py index 3a144926a6..7e20223307 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_eks_integration.py @@ -21,7 +21,7 @@ update_package_version, ) from loguru import logger -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType from package_policy import ( load_data, version_compatible, @@ -98,6 +98,8 @@ package_policy_id, KSPM_EKS_EXPECTED_AGENTS, [], + HostType.KUBERNETES.value, + INTEGRATION_INPUT["name"], ), ) diff --git a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py index 3c7a57fa1c..b5bbf94f04 100755 --- a/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py +++ b/deploy/test-environments/fleet_api/src/install_kspm_unmanaged_integration.py @@ -21,7 +21,7 @@ update_package_version, ) from loguru import logger -from state_file_manager import state_manager, PolicyState +from state_file_manager import state_manager, PolicyState, HostType from package_policy import ( load_data, version_compatible, @@ -84,6 +84,8 @@ package_policy_id, KSPM_UNMANAGED_EXPECTED_AGENTS, [], + HostType.KUBERNETES.value, + INTEGRATION_INPUT["name"], ), ) diff --git 
a/deploy/test-environments/fleet_api/src/package_policy.py b/deploy/test-environments/fleet_api/src/package_policy.py index fb0effee79..806673a722 100644 --- a/deploy/test-environments/fleet_api/src/package_policy.py +++ b/deploy/test-environments/fleet_api/src/package_policy.py @@ -170,6 +170,8 @@ def generate_package_policy(template: dict, policy_input: dict) -> dict: for input_name, data in package_policy["inputs"].items(): if integration_key in input_name: update_input(data, policy_input) + if "vars" in policy_input and "vars" not in data["streams"]["cloud_security_posture.findings"]: + data["streams"]["cloud_security_posture.findings"]["vars"] = policy_input["vars"] package_policy["vars"]["posture"] = policy_input.get("posture", "") package_policy["vars"]["deployment"] = policy_input.get("deployment", "") package_policy["name"] = policy_input.get("name", "") diff --git a/deploy/test-environments/fleet_api/src/state_file_manager.py b/deploy/test-environments/fleet_api/src/state_file_manager.py index a67f8f3b16..1a8ab6b351 100644 --- a/deploy/test-environments/fleet_api/src/state_file_manager.py +++ b/deploy/test-environments/fleet_api/src/state_file_manager.py @@ -4,12 +4,29 @@ """ import json from pathlib import Path +from enum import Enum from utils import delete_file from loguru import logger __state_file = Path(__file__).parent / "state_data.json" +class HostType(Enum): + """ + Enumeration representing different host types for deployment. + + The `HostType` enumeration defines constants for various host types, + such as Kubernetes or Linux-based deployments. + + Attributes: + KUBERNETES (str): Represents a Kubernetes-based deployment. + LINUX_TAR (str): Represents a Linux-based deployment using TAR archives. + """ + + KUBERNETES = "kubernetes" + LINUX_TAR = "linux" + + class PolicyStateEncoder(json.JSONEncoder): """ Custom JSON encoder for PolicyState objects. @@ -27,17 +44,30 @@ class PolicyState: Class to represent a policy state. 
""" - def __init__(self, agnt_policy_id: str, pkg_policy_id: str, expected_agents: int, expected_tags: list[str]): + def __init__( + self, + agnt_policy_id: str, + pkg_policy_id: str, + expected_agents: int, + expected_tags: list[str], + host_type: HostType, + integration_name: str, + ): """ Args: agnt_policy_id (str): ID of the agent policy. pkg_policy_id (str): ID of the package policy. expected_agents (int): Expected number of deployed agents. + expected_tags: (list(int)): List of expected tags count. + host_type (HostType): Deployment host type + integration_name (str): Name of installed integration """ self.agnt_policy_id = agnt_policy_id self.pkg_policy_id = pkg_policy_id self.expected_agents = expected_agents self.expected_tags = expected_tags + self.host_type = host_type + self.integration_name = integration_name class StateFileManager: diff --git a/deploy/test-environments/fleet_api/src/upgrade_agents.py b/deploy/test-environments/fleet_api/src/upgrade_agents.py new file mode 100755 index 0000000000..7b5ee4cae8 --- /dev/null +++ b/deploy/test-environments/fleet_api/src/upgrade_agents.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +""" +This script upgrades Linux-based agents. + +The following steps are performed: +1. Generate a custom agent binary download URL. +2. Update all Linux-based agent policies with the custom download URL. +3. Execute a bulk upgrade process for all agents. +4. Wait until all agent upgrades are complete. + +Note: This script requires a 'state_data.json' file to identify all Linux agents to be updated. + +For execution, create a configuration file 'cnvm_config.json' in the same directory. 
+ +Example 'state_data.json': +{ + "policies": [ + { + "agnt_policy_id": "c3a6d9d0-6b58-11ee-8fd8-b709d88b5892", + "pkg_policy_id": "226965a4-e07a-4ddd-a64d-765ddd9946e5", + "expected_agents": 1, + "expected_tags": [ + "cft_version:cft_version", + "cft_arn:arn:aws:cloudformation:.*" + ], + "type": "linux", + "integration_name": "cnvm-int" + } + ] +} +""" + +import sys +from pathlib import Path +from loguru import logger +import configuration_fleet as cnfg +from api.agent_policy_api import ( + create_agent_download_source, + get_agents, + update_agent_policy, +) +from api.common_api import ( + get_artifact_server, + bulk_upgrade_agents, + wait_for_action_status, +) +from state_file_manager import state_manager, HostType + +STATE_DATA_PATH = Path(__file__).parent / "state_data.json" + + +def create_custom_agent_download_source() -> str: + """Create a custom agent download source and return its ID.""" + host_url = get_artifact_server(version=cnfg.elk_config.stack_version, is_short_url=True) + download_source_id = create_agent_download_source( + cfg=cnfg.elk_config, + name="custom_source", + host=host_url, + ) + return download_source_id + + +def update_linux_policies(download_source_id: str): + """Update all Linux-based agent policies with the custom download source.""" + state_policies = state_manager.get_policies() + linux_policies_list = [] + + for policy in state_policies: + if policy.host_type == HostType.LINUX_TAR.value: + linux_policies_list.append(policy.agnt_policy_id) + update_agent_policy( + cfg=cnfg.elk_config, + policy_id=policy.agnt_policy_id, + json_policy={ + "name": policy.integration_name, + "namespace": "default", + "download_source_id": download_source_id, + }, + ) + + return linux_policies_list + + +def main(): + """ + Main linux agents upgrade flow + """ + download_source_id = create_custom_agent_download_source() + + if not download_source_id: + logger.error("Failed to create the agent download source.") + sys.exit(1) + + linux_policies_list = 
update_linux_policies(download_source_id) + + agents = get_agents(cfg=cnfg.elk_config) + linux_agent_ids = [agent.id for agent in agents if agent.policy_id in linux_policies_list] + + action_id = bulk_upgrade_agents( + cfg=cnfg.elk_config, + agent_ids=linux_agent_ids, + version=cnfg.elk_config.stack_version, + source_uri=get_artifact_server(version=cnfg.elk_config.stack_version), + ) + + wait_for_action_status( + cfg=cnfg.elk_config, + target_action_id=action_id, + target_type="UPGRADE", + target_status="COMPLETE", + ) + + +if __name__ == "__main__": + main() diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index 360b2ba399..ebfae64ea8 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -46,7 +46,6 @@ @pytest.mark.sanity -@pytest.mark.upgrade @pytest.mark.parametrize("match_type", tests_data["cis_k8s"]) def test_kspm_unmanaged_findings(kspm_client, match_type): """ @@ -74,7 +73,6 @@ def test_kspm_unmanaged_findings(kspm_client, match_type): @pytest.mark.sanity -@pytest.mark.upgrade @pytest.mark.parametrize("match_type", tests_data["cis_eks"]) def test_kspm_e_k_s_findings(kspm_client, match_type): """ @@ -117,7 +115,11 @@ def test_cspm_findings(cspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_aws"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_aws"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = cspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cspm_client, CONFIG_TIMEOUT, query, sort, match_type) @@ -140,7 +142,7 @@ def test_cnvm_findings(cnvm_client, match_type): Raises: AssertionError: If the resource type is missing. 
""" - query_list = [] + query_list = [{"term": {"agent.version": STACK_VERSION}}] query, sort = cnvm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cnvm_client, CNVM_CONFIG_TIMEOUT, query, sort, match_type) assert len(results) > 0, f"The resource type '{match_type}' is missing" @@ -162,7 +164,11 @@ def test_cspm_gcp_findings(cspm_client, match_type): Raises: AssertionError: If the resource type is missing. """ - query_list = [{"term": {"rule.benchmark.id": "cis_gcp"}}, {"term": {"resource.type": match_type}}] + query_list = [ + {"term": {"rule.benchmark.id": "cis_gcp"}}, + {"term": {"resource.type": match_type}}, + {"term": {"agent.version": STACK_VERSION}}, + ] query, sort = cspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cspm_client, GCP_CONFIG_TIMEOUT, query, sort, match_type) diff --git a/tests/pyproject.toml b/tests/pyproject.toml index a1a1be37dd..f2b20075ec 100644 --- a/tests/pyproject.toml +++ b/tests/pyproject.toml @@ -31,7 +31,6 @@ markers = [ "pre_merge", "pre_merge_agent", "sanity", - "upgrade", # test target markers "file_system_rules", "k8s_object_rules", From 65d65cc223f49f1b272f64aae6bcf80792140c4e Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 29 Oct 2023 19:55:16 +0200 Subject: [PATCH 29/55] add logging to upgrade agents events and waiter for packages upgrade --- .../fleet_api/src/api/agent_policy_api.py | 4 +- .../fleet_api/src/api/common_api.py | 16 ++--- .../fleet_api/src/api/package_policy_api.py | 33 ++++++++++ .../fleet_api/src/upgrade_agents.py | 64 +++++++++++++++++++ 4 files changed, 108 insertions(+), 9 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py index b4d7797cec..33740bbea7 100644 --- a/deploy/test-environments/fleet_api/src/api/agent_policy_api.py 
+++ b/deploy/test-environments/fleet_api/src/api/agent_policy_api.py @@ -62,7 +62,9 @@ def update_agent_policy(cfg: Munch, policy_id, json_policy: dict): auth=cfg.auth, params={"json": json_policy}, ) - logger.info(f"Agent policy '{policy_id}' has been updated") + logger.info( + f"Agent policy '{policy_id}' for integration '{json_policy.get('name', '')}' has been updated", + ) except APICallException as api_ex: logger.error( f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py index bc13d926c7..0798e6b9c2 100644 --- a/deploy/test-environments/fleet_api/src/api/common_api.py +++ b/deploy/test-environments/fleet_api/src/api/common_api.py @@ -400,7 +400,7 @@ def bulk_upgrade_agents(cfg: Munch, agent_ids: List[str], version: str, source_u "version": version, "source_uri": source_uri, } - + logger.info(f"Source URI: {source_uri}") try: response = perform_api_call( method="POST", @@ -415,6 +415,7 @@ def bulk_upgrade_agents(cfg: Munch, agent_ids: List[str], version: str, source_u "API response did not include an actionId", ) logger.info(f"Agents '{agent_ids}' upgrade to version '{version}' is started") + logger.info(f"Action status id: {action_id}") return action_id except APICallException as api_ex: logger.error( @@ -479,14 +480,13 @@ def wait_for_action_status( while True: action_status = get_action_status(cfg) for item in action_status: - if ( - item.get("actionId") == target_action_id - and item.get("type") == target_type - and item.get("status") == target_status - ): - return True # Found the target criteria + if item.get("actionId") == target_action_id: + logger.info(f"Type: {item.get('type')}, Status: {item.get('status')}") + if item.get("type") == target_type and item.get("status") == target_status: + return True # Found the target criteria if time.time() - start_time >= timeout_secs: + 
logger.error(f"Agent upgrade process reached a timeout of {timeout_secs} seconds.") return False # Timeout reached - time.sleep(1) # Fixed sleep interval of 1 second + time.sleep(2) # Fixed sleep interval of 1 second diff --git a/deploy/test-environments/fleet_api/src/api/package_policy_api.py b/deploy/test-environments/fleet_api/src/api/package_policy_api.py index a6d915c441..7c0806dea9 100644 --- a/deploy/test-environments/fleet_api/src/api/package_policy_api.py +++ b/deploy/test-environments/fleet_api/src/api/package_policy_api.py @@ -163,3 +163,36 @@ def delete_package_policy(cfg: Munch, policy_ids: list): logger.error( f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}", ) + + +def get_package_policy_by_id(cfg: Munch, policy_id: str) -> dict: + """ + Retrieve package policy information by its ID. + + Args: + cfg (Munch): A configuration object containing Kibana URL, authentication details, etc. + policy_id (str): The package policy ID to retrieve. + + Returns: + dict: A dictionary containing the package policy information, + or an empty dictionary if not found. + + Raises: + APICallException: If the API call to retrieve the package policy fails. + """ + # pylint: disable=duplicate-code + url = f"{cfg.kibana_url}/api/fleet/package_policies/{policy_id}" + + try: + response = perform_api_call( + method="GET", + url=url, + auth=cfg.auth, + ) + + return response.get("item", {}) + except APICallException as api_ex: + logger.error( + f"API call failed, status code {api_ex.status_code}. 
Response: {api_ex.response_text}", + ) + return {} diff --git a/deploy/test-environments/fleet_api/src/upgrade_agents.py b/deploy/test-environments/fleet_api/src/upgrade_agents.py index 7b5ee4cae8..270cd7ea96 100755 --- a/deploy/test-environments/fleet_api/src/upgrade_agents.py +++ b/deploy/test-environments/fleet_api/src/upgrade_agents.py @@ -31,6 +31,7 @@ """ import sys +import time from pathlib import Path from loguru import logger import configuration_fleet as cnfg @@ -43,7 +44,9 @@ get_artifact_server, bulk_upgrade_agents, wait_for_action_status, + get_package_version, ) +from api.package_policy_api import get_package_policy_by_id from state_file_manager import state_manager, HostType STATE_DATA_PATH = Path(__file__).parent / "state_data.json" @@ -57,6 +60,7 @@ def create_custom_agent_download_source() -> str: name="custom_source", host=host_url, ) + logger.info(f"Download source id '{download_source_id}' is created") return download_source_id @@ -81,10 +85,70 @@ def update_linux_policies(download_source_id: str): return linux_policies_list +def wait_for_packages_upgrade(): + """ + This function waits until all packages version is upgraded. + """ + desired_version = get_package_version(cfg=cnfg.elk_config) + policies = state_manager.get_policies() + for policy in policies: + if policy.integration_name == "tf-ap-d4c": + continue + if not wait_for_package_policy_version( + cfg=cnfg.elk_config, + policy_id=policy.pkg_policy_id, + desired_version=desired_version, + ): + logger.error(f"Integration {policy.integration_name} failed to upgrade.") + sys.exit(1) + + +def wait_for_package_policy_version( + cfg, + policy_id, + desired_version, + timeout_secs=300, + poll_interval_secs=10, +): + """ + Wait for a package policy to reach the desired version with a timeout. + + Args: + cfg (Munch): A configuration object containing Kibana URL, authentication details, etc. + policy_id (str): The package policy ID to monitor. 
+ desired_version (str): The desired version to wait for. + timeout_secs (int, optional): Maximum time to wait in seconds. Default is 300 seconds. + poll_interval_secs (int, optional): Time to wait between polling for the package version. + Default is 10 seconds. + + Returns: + bool: True if the package policy reaches the desired version within the timeout, + False otherwise. + """ + start_time = time.time() + + while time.time() - start_time < timeout_secs: + policy_info = get_package_policy_by_id(cfg, policy_id) + policy_name = policy_info.get("name", "") + policy_version = policy_info.get("package", {}).get("version", "") + logger.info( + f"Integration: {policy_name}, current version: {policy_version}, desired version: {desired_version}", + ) + if policy_version == desired_version: + return True # Desired version reached + + time.sleep(poll_interval_secs) # Wait and poll again + + return False # Desired version not reached within the timeout + + def main(): """ Main linux agents upgrade flow """ + # Ensure that all packages are on the latest version + wait_for_packages_upgrade() + download_source_id = create_custom_agent_download_source() if not download_source_id: From dd5a57f7f15587587617c01812d60ac4a6f6a4ae Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 29 Oct 2023 21:37:01 +0200 Subject: [PATCH 30/55] change logic to update agents one by one --- .../fleet_api/src/upgrade_agents.py | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/deploy/test-environments/fleet_api/src/upgrade_agents.py b/deploy/test-environments/fleet_api/src/upgrade_agents.py index 270cd7ea96..558e7b165f 100755 --- a/deploy/test-environments/fleet_api/src/upgrade_agents.py +++ b/deploy/test-environments/fleet_api/src/upgrade_agents.py @@ -156,23 +156,25 @@ def main(): sys.exit(1) linux_policies_list = update_linux_policies(download_source_id) - + time.sleep(10) # To ensure that policies 
updated agents = get_agents(cfg=cnfg.elk_config) linux_agent_ids = [agent.id for agent in agents if agent.policy_id in linux_policies_list] + for agent_id in linux_agent_ids: + action_id = bulk_upgrade_agents( + cfg=cnfg.elk_config, + agent_ids=agent_id, + version=cnfg.elk_config.stack_version, + source_uri=get_artifact_server(version=cnfg.elk_config.stack_version), + ) - action_id = bulk_upgrade_agents( - cfg=cnfg.elk_config, - agent_ids=linux_agent_ids, - version=cnfg.elk_config.stack_version, - source_uri=get_artifact_server(version=cnfg.elk_config.stack_version), - ) - - wait_for_action_status( - cfg=cnfg.elk_config, - target_action_id=action_id, - target_type="UPGRADE", - target_status="COMPLETE", - ) + if not wait_for_action_status( + cfg=cnfg.elk_config, + target_action_id=action_id, + target_type="UPGRADE", + target_status="COMPLETE", + ): + sys.exit(1) + logger.info(f"Agent {agent_id} upgrade is finished") if __name__ == "__main__": From b301999dd975019c97bfe2e5e2e6e78d5ba3865d Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 30 Oct 2023 15:30:54 +0200 Subject: [PATCH 31/55] replace raise error by logging warning message --- tests/integration/tests/test_sanity_checks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index ebfae64ea8..5d2ee509aa 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -9,6 +9,7 @@ import pytest from commonlib.utils import get_findings from configuration import elasticsearch +from loguru import logger CONFIG_TIMEOUT = 120 GCP_CONFIG_TIMEOUT = 600 @@ -17,7 +18,7 @@ STACK_VERSION = elasticsearch.stack_version # Check if STACK_VERSION is provided if not STACK_VERSION: - raise ValueError("STACK_VERSION is not provided. 
Please set the STACK_VERSION in the configuration.") + logger.warning("STACK_VERSION is not provided. Please set the STACK_VERSION in the configuration.") tests_data = { "cis_aws": [ From 7642fb96d93c8a222257dffc123cd8a16d3ca179 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:59:28 +0200 Subject: [PATCH 32/55] adding readme file --- dev-docs/Cloud-Env-Upgrade.md | 54 +++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 dev-docs/Cloud-Env-Upgrade.md diff --git a/dev-docs/Cloud-Env-Upgrade.md b/dev-docs/Cloud-Env-Upgrade.md new file mode 100644 index 0000000000..59d211dc78 --- /dev/null +++ b/dev-docs/Cloud-Env-Upgrade.md @@ -0,0 +1,54 @@ +# Cloud Environment Upgrade Testing + +The [`Test Upgrade Environment`](https://github.com/elastic/cloudbeat/actions/workflows/upgrade-environment.yml) GitHub action automates the process of deploying a fully-featured cloud environment, pre-configured with all integrations (KSPM, CSPM, and D4C). +It also facilitates the upgrade of the environment to a new version of the ELK stack and all installed agents, along with performing findings retrieval checks. + + +## How to Run the Workflow + +Follow these steps to run the workflow: + +1. Go to [`Actions > Test Upgrade Environment`](https://github.com/elastic/cloudbeat/actions/workflows/upgrade-environment.yml). + + ![Navigate to Actions](https://github.com/elastic/cloudbeat/assets/99176494/2686668f-7be6-4b55-a37b-e37426c1a0e1) + +2. Click the `Run workflow` button. + + ![Run Workflow](https://github.com/elastic/cloudbeat/assets/99176494/902efe40-ed1b-4175-92a6-504439eb9e3d) + +3. Complete the required parameters: + + - **`deployment_name`**: Name your environment (Allowed characters: a-z0-9 and `-`). For + instance: `john-8-11-0-nov1`. + + - **`elk-stack-version`**: Specify the version of Elastic Cloud stack, either a SNAPSHOT or a build candidate (BC) + version. 
Check the available versions [here](https://artifacts-staging.elastic.co/dra-info/index.html). + For BC, enter only the version without additions/commit sha, e.g. `8.11.0`. + For SNAPSHOT, enter the full version, e.g. `8.12.0-SNAPSHOT`. + + ![Required Parameters](https://github.com/elastic/cloudbeat/assets/99176494/a50141d7-7554-4761-a737-e0f23f0b0492) + +4. Optionally, modify other parameters if required: + + - **`docker-image-override`** (**optional**): Use this to replace the default Docker image for build candidate (BC) or + SNAPSHOT versions. + Provide the full image path. Leave this field blank for snapshot versions. Follow this format for the image + path: `docker.elastic.co/cloud-release/elastic-agent-cloud:8.11.0-cb971279`. If you're not sure where to get this + image path from, look for message like [this](https://elastic.slack.com/archives/C0JFN9HJL/p1698263174847419) in + #mission-control channel, you can see it specify the stack version and the BC commit sha in the first line, + e.g. `elastic / unified-release - staging # 8.11 - 10 - 8.9.0-cb971279`. Now just copy it + and replace it the image path: `docker.elastic.co/cloud-release/elastic-agent-cloud:8.11.0-cb971279`. 
+ + ![Optional Parameters](https://github.com/elastic/cloudbeat/assets/99176494/5b7f15bd-6f56-4eb0-b7d6-fc6a7656ffb0) + +## Tracking Workflow Execution + +Tracking workflow execution follows the same steps as defined in the [Create Environment](./Cloud-Env-Testing.md#tracking-workflow-execution) + +## Logging into the Environment + +Logging into the environment can be done following the steps detailed in the [Create Environment](./Cloud-Env-Testing.md#logging-into-the-environment) + +## Cleanup Procedure + +The cleanup procedure is also described in the [Create Environment](./Cloud-Env-Testing.md#cleanup-procedure) From 6ea8e0f468c52ab2636dbe128ba443b7cdf584c7 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Thu, 2 Nov 2023 15:49:55 +0200 Subject: [PATCH 33/55] update sleep timeout --- deploy/test-environments/fleet_api/src/upgrade_agents.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/test-environments/fleet_api/src/upgrade_agents.py b/deploy/test-environments/fleet_api/src/upgrade_agents.py index 558e7b165f..2f17d0598f 100755 --- a/deploy/test-environments/fleet_api/src/upgrade_agents.py +++ b/deploy/test-environments/fleet_api/src/upgrade_agents.py @@ -156,7 +156,7 @@ def main(): sys.exit(1) linux_policies_list = update_linux_policies(download_source_id) - time.sleep(10) # To ensure that policies updated + time.sleep(180) # To ensure that policies updated agents = get_agents(cfg=cnfg.elk_config) linux_agent_ids = [agent.id for agent in agents if agent.policy_id in linux_policies_list] for agent_id in linux_agent_ids: From 1ef9a5d8d9f0301f2970a312210d62e66080142d Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 5 Nov 2023 22:18:08 +0200 Subject: [PATCH 34/55] Update deploy/test-environments/fleet_api/src/upgrade_cnvm.py Co-authored-by: Oren Zohar <85433724+oren-zohar@users.noreply.github.com> --- 
deploy/test-environments/fleet_api/src/upgrade_cnvm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/test-environments/fleet_api/src/upgrade_cnvm.py b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py index 9cf63bb901..56d4f7d4ca 100755 --- a/deploy/test-environments/fleet_api/src/upgrade_cnvm.py +++ b/deploy/test-environments/fleet_api/src/upgrade_cnvm.py @@ -45,7 +45,7 @@ def update_cloudformation_stack(cfg: Munch): Update an AWS CloudFormation stack with the provided configuration. Args: - cnfg (Munch): A configuration object containing the following attributes: + cfg (Munch): A configuration object containing the following attributes: - stack_name (str): The name of the CloudFormation stack to update. - template (str): The URL or S3 path to the CloudFormation template. - elastic_agent_version (str): The Elastic Agent version to set as a parameter. From 0b8e5e971c3024e7f43012bc125fa9d2ceba5c44 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Sun, 5 Nov 2023 22:19:39 +0200 Subject: [PATCH 35/55] fix review comments --- .github/workflows/test-environment.yml | 2 +- .github/workflows/upgrade-environment.yml | 14 +++++++------- .../fleet_api/src/api/base_call_api.py | 2 ++ .../fleet_api/src/api/common_api.py | 2 +- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml index 3dd6d0c8ae..bc4659f2e6 100644 --- a/.github/workflows/test-environment.yml +++ b/.github/workflows/test-environment.yml @@ -353,7 +353,7 @@ jobs: poetry run python src/agents_enrolled.py - name: Run Sanity checks - if: ${{ success() && (inputs.run-sanity-tests == 'true' || inputs.run-sanity-tests == true) }} + if: ${{ success() && inputs.run-sanity-tests == true }} working-directory: ./tests run: | poetry install diff --git a/.github/workflows/upgrade-environment.yml b/.github/workflows/upgrade-environment.yml index 
31dc40cfc1..3cbdb6aa4c 100644 --- a/.github/workflows/upgrade-environment.yml +++ b/.github/workflows/upgrade-environment.yml @@ -10,9 +10,9 @@ on: description: | Name with letters, numbers, hyphens; start with a letter. Max 20 chars. e.g., 'my-env-123' required: true - elk-stack-version: + target-elk-stack-version: required: true - description: "Stack version: For released/BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" + description: "Target version of the ELK stack: For BC version use 8.x.y, for SNAPSHOT use 8.x.y-SNAPSHOT" default: "8.11.0" type: string docker-image-override: @@ -25,7 +25,7 @@ env: AWS_REGION: "eu-west-1" WORKING_DIR: deploy/test-environments FLEET_API_DIR: fleet_api/src - TF_VAR_stack_version: ${{ inputs.elk-stack-version }} + TF_VAR_stack_version: ${{ inputs.target-elk-stack-version }} TF_VAR_ess_region: gcp-us-west2 TF_VAR_ec_api_key: ${{ secrets.EC_API_KEY }} DOCKER_IMAGE: ${{ inputs.docker-image-override }} @@ -40,7 +40,7 @@ jobs: - name: Set Previous Version id: set-previous-version run: | - VERSION="${{ inputs.elk-stack-version }}" + VERSION="${{ inputs.target-elk-stack-version }}" # Extract the major and minor versions MAJOR_VERSION=$(echo $VERSION | cut -d'.' -f1) @@ -187,7 +187,7 @@ jobs: - name: Set Docker Image version if: ${{ ! 
inputs.docker-image-override }} env: - VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.elk-stack-version }}' + VERSION: 'docker.elastic.co/beats/elastic-agent:${{ inputs.target-elk-stack-version }}' run: | echo "DOCKER_IMAGE=${{ env.VERSION }}" >> $GITHUB_ENV @@ -215,7 +215,7 @@ jobs: working-directory: ${{ env.WORKING_DIR }}/${{ env.FLEET_API_DIR }} env: CNVM_STACK_NAME: ${{ needs.deploy.outputs.cnvm-stack-name }} - STACK_VERSION: ${{ inputs.elk-stack-version }} + STACK_VERSION: ${{ inputs.target-elk-stack-version }} run: | poetry run python upgrade_agents.py @@ -223,7 +223,7 @@ jobs: if: success() working-directory: ./tests env: - STACK_VERSION: ${{ inputs.elk-stack-version }} + STACK_VERSION: ${{ inputs.target-elk-stack-version }} USE_K8S: false run: | poetry install diff --git a/deploy/test-environments/fleet_api/src/api/base_call_api.py b/deploy/test-environments/fleet_api/src/api/base_call_api.py index 61b63283a8..1479251d87 100644 --- a/deploy/test-environments/fleet_api/src/api/base_call_api.py +++ b/deploy/test-environments/fleet_api/src/api/base_call_api.py @@ -40,6 +40,8 @@ def perform_api_call(method, url, return_json=True, headers=None, auth=None, par Args: method (str): The HTTP method for the API call (e.g., 'GET', 'POST', 'PUT', 'DELETE'). url (str): The URL of the API endpoint. + return_json (bool, optional): Indicates whether the function should return + JSON data (default is True). headers (dict, optional): The headers to be included in the API request. If not provided, default headers will be used. 
auth (tuple or None, optional): The authentication tuple (username, password) diff --git a/deploy/test-environments/fleet_api/src/api/common_api.py b/deploy/test-environments/fleet_api/src/api/common_api.py index 0798e6b9c2..d2c61cba0d 100644 --- a/deploy/test-environments/fleet_api/src/api/common_api.py +++ b/deploy/test-environments/fleet_api/src/api/common_api.py @@ -196,7 +196,7 @@ def get_artifact_server(version: str, is_short_url: bool = False) -> str: Args: elastic_version (str): The version of Elastic. - is_snapshot_url (bool, optional): Indicates whether to use the short artifact URL. + is_short_url (bool, optional): Indicates whether to use the short artifact URL. Defaults to False. Returns: From a34f32feec7a2918006d7775cbf8fd1578f4789b Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 16:30:09 +0200 Subject: [PATCH 36/55] fix timeout and enable workflow --- .github/workflows/gcp-ci.yml | 1 - tests/integration/tests/test_sanity_checks.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 39fa60c83c..c29959baea 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -10,7 +10,6 @@ on: jobs: Run-CSPM-GCP-Tests: name: CIS GCP integration test - if: false runs-on: ubuntu-22.04 timeout-minutes: 60 permissions: diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index e970534d1a..cfde69ac05 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -10,7 +10,7 @@ from commonlib.utils import get_findings CONFIG_TIMEOUT = 120 -GCP_CONFIG_TIMEOUT = 600 +GCP_CONFIG_TIMEOUT = 1200 CNVM_CONFIG_TIMEOUT = 3600 tests_data = { From 5f7b3a28371a77309881a993588543d8437f29ae Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 
16:34:44 +0200 Subject: [PATCH 37/55] add id to test action flow --- .github/workflows/gcp-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index c29959baea..ad2c1767ae 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,6 +62,7 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings + id: run-tests working-directory: ./tests env: USE_K8S: false From f3ccecf1f0db6d1fd6807205a157a78736b60a24 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 16:47:24 +0200 Subject: [PATCH 38/55] add env var stack version --- .github/workflows/gcp-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index ad2c1767ae..aa7a133126 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,10 +62,10 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings - id: run-tests working-directory: ./tests env: USE_K8S: false + STACK_VERSION: ${{ env.ELK_VERSION }} run: | poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=4 From 76dd33372e4375087aff395fa41f79ee597f27e5 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 20:24:05 +0200 Subject: [PATCH 39/55] update env vars --- .github/workflows/gcp-ci.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index aa7a133126..5690d69086 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -66,9 +66,16 @@ jobs: env: USE_K8S: false STACK_VERSION: ${{ env.ELK_VERSION }} + ES_USER: elastic run: | poetry install - poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir 
--maxfail=4 + poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 + + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + if: failure() + with: + limit-access-to-actor: true - name: Print cloudbeat logs if: always() From 903816748940f24cce3ff7c45ebd27602b2f7e54 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 20:25:50 +0200 Subject: [PATCH 40/55] update id --- .github/workflows/gcp-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 5690d69086..6f14cb6bb2 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,6 +62,7 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings + id: run-tests working-directory: ./tests env: USE_K8S: false From 1f5c9e59e0bcb972773302bcf51f9afffb209cfd Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Mon, 6 Nov 2023 21:29:32 +0200 Subject: [PATCH 41/55] update env vars --- .github/workflows/gcp-ci.yml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 6f14cb6bb2..9f5cecd6c0 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -66,18 +66,11 @@ jobs: working-directory: ./tests env: USE_K8S: false - STACK_VERSION: ${{ env.ELK_VERSION }} - ES_USER: elastic + STACK_VERSION: ${{ env.CLOUDBEAT_VERSION }} run: | poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 - - name: Setup tmate session - uses: mxschmitt/action-tmate@v3 - if: failure() - with: - limit-access-to-actor: true - - name: Print cloudbeat logs if: always() run: | From 2dd47bd50fc53f0240f54da0d451146b23f6c4cb Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> 
Date: Mon, 6 Nov 2023 21:31:11 +0200 Subject: [PATCH 42/55] remove id --- .github/workflows/gcp-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 9f5cecd6c0..36fd57ece0 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,7 +62,6 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings - id: run-tests working-directory: ./tests env: USE_K8S: false From bc8d0cc0fd969a20a06fb2d5981096ad9f3b42fe Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 15:01:52 +0200 Subject: [PATCH 43/55] update timeout and period config --- deploy/gcp/cloudbeat-gcp.yml | 2 +- tests/integration/tests/test_sanity_checks.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy/gcp/cloudbeat-gcp.yml b/deploy/gcp/cloudbeat-gcp.yml index 13b3d79bc0..dad778a685 100644 --- a/deploy/gcp/cloudbeat-gcp.yml +++ b/deploy/gcp/cloudbeat-gcp.yml @@ -11,7 +11,7 @@ cloudbeat: credentials_file_path: ${GOOGLE_APPLICATION_CREDENTIALS:""} type: cloudbeat/cis_gcp # Defines how often an event is sent to the output - period: 30s + period: 60s evaluator: decision_logs: false # =================================== Kibana =================================== diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index cfde69ac05..c6e0e27889 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -10,7 +10,7 @@ from commonlib.utils import get_findings CONFIG_TIMEOUT = 120 -GCP_CONFIG_TIMEOUT = 1200 +GCP_CONFIG_TIMEOUT = 1800 CNVM_CONFIG_TIMEOUT = 3600 tests_data = { From 3f8f13585d860a95c1434d963e65073e5158ee73 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 15:36:21 +0200 Subject: [PATCH 44/55] add id --- 
.github/workflows/gcp-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 36fd57ece0..d77d2ec1c0 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,6 +62,7 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings + id: test-run working-directory: ./tests env: USE_K8S: false From 2757bd51084df981b1733ab0e04d43d074a7e144 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 16:16:15 +0200 Subject: [PATCH 45/55] Update test_sanity_checks.py --- tests/integration/tests/test_sanity_checks.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index a817473336..e01c0db783 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ b/tests/integration/tests/test_sanity_checks.py @@ -20,6 +20,8 @@ if not STACK_VERSION: logger.warning("STACK_VERSION is not provided. 
Please set the STACK_VERSION in the configuration.") +agent_term = {"term": {"agent.version": STACK_VERSION}} + tests_data = { "cis_aws": [ "cloud-compute", @@ -168,8 +170,9 @@ def test_cspm_gcp_findings(cspm_client, match_type): query_list = [ {"term": {"rule.benchmark.id": "cis_gcp"}}, {"term": {"resource.type": match_type}}, - {"term": {"agent.version": STACK_VERSION}}, ] + if STACK_VERSION: + query_list.append(agent_term) query, sort = cspm_client.build_es_must_match_query(must_query_list=query_list, time_range="now-24h") results = get_findings(cspm_client, GCP_CONFIG_TIMEOUT, query, sort, match_type) From 0fcaff1137cc3d0285cdd652a86e877649890dcf Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 16:17:26 +0200 Subject: [PATCH 46/55] Update gcp-ci.yml --- .github/workflows/gcp-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 36fd57ece0..221f963b4c 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -65,7 +65,6 @@ jobs: working-directory: ./tests env: USE_K8S: false - STACK_VERSION: ${{ env.CLOUDBEAT_VERSION }} run: | poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 From 4da12a3e4b3b6f202b5e927611aa96112db40fb3 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 16:18:15 +0200 Subject: [PATCH 47/55] remove id --- .github/workflows/gcp-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index d77d2ec1c0..36fd57ece0 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,7 +62,6 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings - id: test-run working-directory: ./tests env: USE_K8S: false From ba9c0b3cc58134cf42cbd738930e5a1f5fd7b0c0 Mon Sep 17 
00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 16:21:30 +0200 Subject: [PATCH 48/55] remove env var --- .github/workflows/gcp-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 36fd57ece0..221f963b4c 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -65,7 +65,6 @@ jobs: working-directory: ./tests env: USE_K8S: false - STACK_VERSION: ${{ env.CLOUDBEAT_VERSION }} run: | poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 From 75f395b61b165338117261ca6e5319b61dbe24d9 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 17:01:27 +0200 Subject: [PATCH 49/55] Update gcp-ci.yml --- .github/workflows/gcp-ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 221f963b4c..e3988792ff 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -69,6 +69,12 @@ jobs: poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + if: failure() + with: + limit-access-to-actor: true + - name: Print cloudbeat logs if: always() run: | From be08587c7dbe8437b1129d3502c89d93e67330f6 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 17:02:22 +0200 Subject: [PATCH 50/55] Update test_sanity_checks.py --- tests/integration/tests/test_sanity_checks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/tests/test_sanity_checks.py b/tests/integration/tests/test_sanity_checks.py index e01c0db783..b3943cca66 100644 --- a/tests/integration/tests/test_sanity_checks.py +++ 
b/tests/integration/tests/test_sanity_checks.py @@ -12,7 +12,7 @@ from loguru import logger CONFIG_TIMEOUT = 120 -GCP_CONFIG_TIMEOUT = 1800 +GCP_CONFIG_TIMEOUT = 300 CNVM_CONFIG_TIMEOUT = 3600 STACK_VERSION = elasticsearch.stack_version From 557bc0e2175a725f0f9142721052999c4717db63 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 17:04:09 +0200 Subject: [PATCH 51/55] add run-tests id --- .github/workflows/gcp-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 221f963b4c..3ffb532a78 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,6 +62,7 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings + id: run-tests working-directory: ./tests env: USE_K8S: false From 46d33e663179f6a9af4e440b0e57f1e984d3603c Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 18:33:47 +0200 Subject: [PATCH 52/55] Update gcp-ci.yml --- .github/workflows/gcp-ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index e3988792ff..a8c92caa33 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -66,6 +66,9 @@ jobs: env: USE_K8S: false run: | + curl -X PUT "http://localhost:9200/*cloud_security_posture.findings*/_settings" -H "Content-Type: application/json" -d '{ + "index.mapping.total_fields.limit": 2000 + }' poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 From 7ccd84e431ab047b6610b7a3e5e9e9d98eaf11ab Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 18:35:04 +0200 Subject: [PATCH 53/55] update commands --- .github/workflows/gcp-ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 3ffb532a78..8f7ed523dd 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,11 +62,13 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings - id: run-tests working-directory: ./tests env: USE_K8S: false run: | + curl -X PUT "http://localhost:9200/*cloud_security_posture.findings*/_settings" -H "Content-Type: application/json" -d '{ + "index.mapping.total_fields.limit": 2000, + }' poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 From ff6f463236bc641758bb0f4e0e3a7cfee1da24c8 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 18:59:07 +0200 Subject: [PATCH 54/55] update commands --- .github/workflows/gcp-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 8f7ed523dd..0e7339724d 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,6 +62,7 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings + id: test-id working-directory: ./tests env: USE_K8S: false From abd7a2f528efa13859dbd1f308f699120cdaffa2 Mon Sep 17 00:00:00 2001 From: Dmitry Gurevich <99176494+gurevichdmitry@users.noreply.github.com> Date: Tue, 7 Nov 2023 19:33:34 +0200 Subject: [PATCH 55/55] update flow --- .github/workflows/gcp-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/gcp-ci.yml b/.github/workflows/gcp-ci.yml index 00f104d570..b92b85b208 100644 --- a/.github/workflows/gcp-ci.yml +++ b/.github/workflows/gcp-ci.yml @@ -62,15 +62,15 @@ jobs: ./cloudbeat -c deploy/gcp/cloudbeat-gcp.yml -d '*' & - name: Check for findings - id: test-id working-directory: ./tests env: USE_K8S: false run: | + poetry install + sleep 5 curl -X PUT 
"http://localhost:9200/*cloud_security_posture.findings*/_settings" -H "Content-Type: application/json" -d '{ "index.mapping.total_fields.limit": 2000 }' - poetry install poetry run pytest -k "cspm_gcp" --alluredir=./allure/results/ --clean-alluredir --maxfail=1 - name: Setup tmate session