
Commit 3ceaf0d

[Asset Inventory] [AWS] Add setup to support integration testing and automatic provisioning for Asset Inventory (#2477)

1 parent dfbb594 · commit 3ceaf0d

45 files changed: +640 −59 lines. (Large commit: only a subset of the changed files is shown below.)

.ci/scripts/set_cloud_env_params.sh (+12)

```diff
@@ -32,6 +32,10 @@ EC2_KSPM=$(terraform output -raw ec2_kspm_ssh_cmd)
 echo "::add-mask::$EC2_KSPM"
 echo "EC2_KSPM=$EC2_KSPM" >>"$GITHUB_ENV"
 
+EC2_ASSET_INV=$(terraform output -raw ec2_asset_inventory_ssh_cmd)
+echo "::add-mask::$EC2_ASSET_INV"
+echo "EC2_ASSET_INV=$EC2_ASSET_INV" >>"$GITHUB_ENV"
+
 EC2_CSPM_KEY=$(terraform output -raw ec2_cspm_key)
 echo "::add-mask::$EC2_CSPM_KEY"
 echo "EC2_CSPM_KEY=$EC2_CSPM_KEY" >>"$GITHUB_ENV"
@@ -40,10 +44,18 @@ EC2_KSPM_KEY=$(terraform output -raw ec2_kspm_key)
 echo "::add-mask::$EC2_KSPM_KEY"
 echo "EC2_KSPM_KEY=$EC2_KSPM_KEY" >>"$GITHUB_ENV"
 
+EC2_ASSET_INV_KEY=$(terraform output -raw ec2_asset_inventory_key)
+echo "::add-mask::$EC2_ASSET_INV_KEY"
+echo "EC2_ASSET_INV_KEY=$EC2_ASSET_INV_KEY" >>"$GITHUB_ENV"
+
 KSPM_PUBLIC_IP=$(terraform output -raw ec2_kspm_public_ip)
 echo "::add-mask::$KSPM_PUBLIC_IP"
 echo "KSPM_PUBLIC_IP=$KSPM_PUBLIC_IP" >>"$GITHUB_ENV"
 
+ASSET_INV_PUBLIC_IP=$(terraform output -raw ec2_asset_inventory_public_ip)
+echo "::add-mask::$ASSET_INV_PUBLIC_IP"
+echo "ASSET_INV_PUBLIC_IP=$ASSET_INV_PUBLIC_IP" >>"$GITHUB_ENV"
+
 CSPM_PUBLIC_IP=$(terraform output -raw ec2_cspm_public_ip)
 echo "::add-mask::$CSPM_PUBLIC_IP"
 echo "CSPM_PUBLIC_IP=$CSPM_PUBLIC_IP" >>"$GITHUB_ENV"
```
.github/actions/aws-asset-inventory-ci/action.yml (new file, +69)

```yaml
name: 'AWS Asset Inventory CI'
description: 'AWS Asset Inventory integration tests'
inputs:
  elk-version:
    description: 'ELK version'
    required: true
  aws-access-key-id:
    description: 'AWS access key id'
    required: true
  aws-secret-access-key:
    description: 'AWS secret access key'
    required: true
  aws-account-type:
    description: 'AWS account type'
    required: false
    default: single-account

  debug:
    description: 'debug'
    required: false
    default: 'false'
runs:
  using: composite
  steps:
    - name: Init Integration
      uses: ./.github/actions/init-integration
      with:
        elk-version: ${{ inputs.elk-version }}

    - name: Run cloudbeat in background
      env:
        ES_HOST: http://localhost:9200
        ES_USERNAME: elastic
        ES_PASSWORD: changeme
        AWS_ACCESS_KEY_ID: ${{ inputs.aws-access-key-id }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.aws-secret-access-key }}
        AWS_ACCOUNT_TYPE: ${{ inputs.aws-account-type }}
      shell: bash
      run: |
        ./cloudbeat -c deploy/aws-asset-inventory/cloudbeat-aws-asset-inventory.yml -d '*' &

    - name: Wait for cloudbeat to send some events
      shell: bash
      run: sleep 20

    - name: Check for assets
      working-directory: ./tests
      env:
        USE_K8S: "false"
      shell: bash
      run: poetry run pytest -k "asset_inventory_aws" --alluredir=./allure/results/ --clean-alluredir

    - name: Upload test results
      if: ${{ always() }}
      uses: actions/upload-artifact@v4
      with:
        name: allure-results-ci-aws-asset-inventory
        path: tests/allure/results/
        overwrite: true

    - if: ${{ failure() || cancelled() || inputs.debug == 'true' }}
      name: Upload cloudbeat logs
      uses: actions/upload-artifact@v4
      with:
        name: cloubeat-logs-ci-aws-asset-inventory
        path: logs/
        if-no-files-found: warn
        retention-days: 1
        overwrite: true
```
.github/actions/aws-ci/action.yml (+1, −1)

```diff
@@ -48,7 +48,7 @@ runs:
       env:
         USE_K8S: "false"
       shell: bash
-      run: poetry run pytest -k "aws" --alluredir=./allure/results/ --clean-alluredir
+      run: poetry run pytest -k "cspm_aws" --alluredir=./allure/results/ --clean-alluredir
 
     - name: Upload test results
       if: ${{ always() }}
```

.github/actions/azure-ci/action.yml (+1, −1)

```diff
@@ -49,7 +49,7 @@ runs:
       env:
         USE_K8S: "false"
       shell: bash
-      run: poetry run pytest -k "azure" --alluredir=./allure/results/ --clean-alluredir
+      run: poetry run pytest -k "cspm_azure" --alluredir=./allure/results/ --clean-alluredir
 
     - name: Upload test results
      if: ${{ always() }}
```
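These narrowed `-k` filters matter because pytest matches the expression against test node names as substrings: once `asset_inventory_aws` tests exist, a bare `-k "aws"` in the CSPM job would pick them up too. A small illustration with hypothetical test names:

```python
# Hypothetical test names, showing pytest -k substring selection.
def test_cspm_aws_rules():  # matched by -k "aws" and by -k "cspm_aws"
    assert True

def test_asset_inventory_aws_ec2():  # matched by -k "aws" and -k "asset_inventory_aws",
    assert True                      # but not by -k "cspm_aws"
```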

.github/workflows/ci.yml (+23)

```diff
@@ -76,6 +76,28 @@ jobs:
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_TEST_ACC }}
           aws-account-type: single-account
 
+  ci-aws-asset-inventory:
+    needs: [ init-hermit ]
+    name: AWS Asset Inventory CI
+    runs-on: ubuntu-22.04
+    timeout-minutes: 60
+    permissions:
+      contents: "read"
+      id-token: "write"
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v4
+
+      - name: Hermit Environment
+        uses: ./.github/actions/hermit
+
+      - name: Run AWS Asset Inventory integration tests
+        uses: ./.github/actions/aws-asset-inventory-ci
+        with:
+          elk-version: ${{ env.ELK_VERSION }}
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_TEST_ACC }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_TEST_ACC }}
+
   ci-gcp:
     needs: [ init-hermit ]
     name: CIS GCP CI
@@ -225,6 +247,7 @@
     needs:
       - ci-azure
      - ci-aws
+      - ci-aws-asset-inventory
      - ci-gcp
      - ci-cnvm
      - ci-k8s
```

.github/workflows/test-environment.yml (+20, −5)

```diff
@@ -305,6 +305,7 @@
           aws s3 cp "./terraform.tfstate" "${S3_BUCKET}/terraform.tfstate"
           aws s3 cp "${EC2_CSPM_KEY}" "${S3_BUCKET}/cspm.pem"
           aws s3 cp "${EC2_KSPM_KEY}" "${S3_BUCKET}/kspm.pem"
+          aws s3 cp "${EC2_ASSET_INV_KEY}" "${S3_BUCKET}/asset_inv.pem"
           echo "s3-bucket-folder=${S3_BUCKET}" >> $GITHUB_OUTPUT
           echo "aws-cnvm-stack=${CNVM_STACK_NAME}" >> $GITHUB_OUTPUT
           python3 ../../.ci/scripts/create_env_config.py
@@ -473,17 +474,31 @@
           cmd="chmod +x $scriptname && ./$scriptname"
           ../../.ci/scripts/remote_setup.sh -k "$EC2_CSPM_KEY" -s "$src" -h "$CSPM_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
 
+      - name: Install AWS Asset Inventory integration
+        id: aws-asset-inventory
+        working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
+        run: |
+          poetry run python ./install_aws_asset_inventory_integration.py
+
+      - name: Deploy AWS Asset Inventory agent
+        run: |
+          scriptname="aws-asset-inventory-linux.sh"
+          src="../../$INTEGRATIONS_SETUP_DIR/$scriptname"
+          cmd="chmod +x $scriptname && ./$scriptname"
+          ../../.ci/scripts/remote_setup.sh -k "$EC2_ASSET_INV_KEY" -s "$src" -h "$ASSET_INV_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
+
       - name: Upload Integrations data
         if: always()
         env:
           S3_BUCKET: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
         working-directory: ${{ env.INTEGRATIONS_SETUP_DIR }}
         run: |
-          aws s3 cp "./kspm_unmanaged.yaml" "${{ env.S3_BUCKET }}/kspm_unmanaged.yaml"
-          aws s3 cp "./kspm_d4c.yaml" "${{ env.S3_BUCKET }}/kspm_d4c.yaml"
-          aws s3 cp "./kspm_eks.yaml" "${{ env.S3_BUCKET }}/kspm_eks.yaml"
-          aws s3 cp "./cspm-linux.sh" "${{ env.S3_BUCKET }}/cspm-linux.sh"
-          aws s3 cp "./state_data.json" "${{ env.S3_BUCKET }}/state_data.json"
+          aws s3 cp "./kspm_unmanaged.yaml" "$S3_BUCKET/kspm_unmanaged.yaml"
+          aws s3 cp "./kspm_d4c.yaml" "$S3_BUCKET/kspm_d4c.yaml"
+          aws s3 cp "./kspm_eks.yaml" "$S3_BUCKET/kspm_eks.yaml"
+          aws s3 cp "./cspm-linux.sh" "$S3_BUCKET/cspm-linux.sh"
+          aws s3 cp "./aws-asset-inventory-linux.sh" "$S3_BUCKET/aws-asset-inventory-linux.sh"
+          aws s3 cp "./state_data.json" "$S3_BUCKET/state_data.json"
 
       - name: Install Agentless integrations
         id: agentless
```
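The deploy step reuses the repository's `remote_setup.sh` helper. Reading the invocation, the flags appear to carry the SSH key (`-k`), local script (`-s`), target host (`-h`), remote destination (`-d`), and command to run (`-c`); that reading, the `scp`/`ssh` mechanics, and the `ubuntu` login below are assumptions for illustration only:

```python
"""Illustrative sketch of what the remote_setup.sh call appears to do."""
import subprocess

def remote_setup(key: str, src: str, host: str, dest: str, cmd: str) -> None:
    ssh_opts = ["-i", key, "-o", "StrictHostKeyChecking=no"]
    # Copy the generated installer script to the EC2 instance...
    subprocess.run(["scp", *ssh_opts, src, f"ubuntu@{host}:{dest}"], check=True)
    # ...then execute it remotely ("chmod +x ... && ./...").
    subprocess.run(["ssh", *ssh_opts, f"ubuntu@{host}", cmd], check=True)
```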

.gitignore (+2)

```diff
@@ -32,6 +32,8 @@ vendor/*
 test-logs/*.log
 tests/allure/results/*
 tests/allure/reports/*
+tests/integrations_setup/state_data.json
+tests/integrations_setup/*.sh
 
 # vscode
 .vscode*
```
deploy/aws-asset-inventory/cloudbeat-aws-asset-inventory.yml (new file, +64)

```yaml
cloudbeat:
  type: cloudbeat/asset_inventory
  config:
    v1:
      type: asset_inventory
      asset_inventory_provider: aws
      aws:
        credentials:
          access_key_id: ${AWS_ACCESS_KEY_ID:""}
          secret_access_key: ${AWS_SECRET_ACCESS_KEY:""}
        account_type: ${AWS_ACCOUNT_TYPE:""}
  # Defines how often an event is sent to the output
  period: 30s
  evaluator:
    decision_logs: false
# =================================== Kibana ===================================
setup.kibana:
  # Kibana Host
  host: "http://host.docker.internal:5601"
# =============================== Elastic Cloud ================================

# These settings simplify using Cloudbeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ${ES_HOST}

  # Protocol - either `http` (default) or `https`.
  # protocol: "https"

  # Authentication credentials - either API key or username/password.
  #api_key: "id:api_key"
  username: ${ES_USERNAME}
  password: ${ES_PASSWORD}

  # Enable to allow sending output to older ES versions
  allow_older_versions: true

# ================================= Processors =================================
processors:
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - drop_fields:
      fields: ["host.name"]
# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
logging.level: debug
# Enable debug output for selected components. To enable all selectors use ["*"]
# Other available selectors are "beat", "publisher", "service"
# Multiple selectors can be chained.
#logging.selectors: ["publisher"]

# Send all logging output to stderr. The default is false.
#logging.to_stderr: false
```
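The `${VAR}` / `${VAR:"default"}` placeholders are Beats-style environment-variable expansion; this is what lets the CI action feed `ES_HOST`, the AWS credentials, and the account type into one static config file. A rough Python sketch of that expansion rule (illustration only, not the Beats implementation, which instead fails on an unset variable that has no default):

```python
import os
import re

# Matches ${NAME} or ${NAME:"default"}.
_PLACEHOLDER = re.compile(r'\$\{(\w+)(?::"([^"]*)")?\}')

def expand(value: str) -> str:
    """Substitute environment variables into a config value."""
    return _PLACEHOLDER.sub(
        lambda m: os.environ.get(m.group(1), m.group(2) or ""),
        value,
    )

os.environ["ES_HOST"] = "http://localhost:9200"
print(expand("${ES_HOST}"))              # http://localhost:9200
print(expand('${AWS_ACCOUNT_TYPE:""}'))  # empty string when unset
```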

deploy/test-environments/main.tf (+10, −1)

```diff
@@ -47,6 +47,16 @@ module "aws_ec2_for_cspm" {
   specific_tags = merge(local.common_tags, { "ec2_type" : "cspm" })
 }
 
+module "aws_ec2_for_asset_inventory" {
+  source          = "../cloud/modules/ec2"
+  providers       = { aws : aws }
+  aws_ami         = var.ami_map[var.region]
+  deploy_k8s      = false
+  deploy_agent    = false # Agent will not be deployed
+  deployment_name = "${var.deployment_name}-${random_string.suffix.result}"
+  specific_tags   = merge(local.common_tags, { "ec2_type" : "asset_inventory" })
+}
+
 module "gcp_audit_logs" {
   count     = var.cdr_infra ? 1 : 0
   providers = { google : google }
@@ -55,7 +65,6 @@
   deployment_name = var.deployment_name
   network         = "default"
   specific_tags   = merge(local.common_tags, { "vm_instance" : "audit-logs" })
-
 }
 
 resource "random_string" "suffix" {
```

deploy/test-environments/output.tf (+15)

```diff
@@ -37,6 +37,21 @@ output "ec2_cspm_key" {
   sensitive = true
 }
 
+output "ec2_asset_inventory_ssh_cmd" {
+  value     = module.aws_ec2_for_asset_inventory.cloudbeat_ssh_cmd
+  sensitive = true
+}
+
+output "ec2_asset_inventory_public_ip" {
+  value     = module.aws_ec2_for_asset_inventory.aws_instance_cloudbeat_public_ip
+  sensitive = true
+}
+
+output "ec2_asset_inventory_key" {
+  value     = module.aws_ec2_for_asset_inventory.ec2_ssh_key
+  sensitive = true
+}
+
 output "ec2_cloudtrail_ssh_cmd" {
   value     = var.cdr_infra ? module.aws_ec2_for_cloudtrail[0].cloudbeat_ssh_cmd : null
   sensitive = true
```

tests/commonlib/io_utils.py (+51)

```diff
@@ -56,6 +56,57 @@ def get_events_from_index(
     return events
 
 
+def get_assets_from_index(
+    elastic_client,
+    category: str,
+    sub_category: str,
+    type_: str,
+    sub_type: str,
+    time_after: datetime,
+) -> list[Munch]:
+    """
+    Returns assets from a given index matching the given classification.
+    @param elastic_client: Client to connect to Elasticsearch.
+    @param category: Asset category used as a filter
+    @param sub_category: Asset subcategory used as a filter
+    @param type_: Asset type used as a filter
+    @param sub_type: Asset subtype used as a filter
+    @param time_after: Filter events having timestamp > time_after
+    @return: List of Munch objects
+    """
+    query = {
+        "bool": {
+            "must": [
+                {"match": {"asset.category": category}},
+                {"match": {"asset.sub_category": sub_category}},
+                {"match": {"asset.type": type_}},
+                {"match": {"asset.sub_type": sub_type}},
+            ],
+            "filter": [
+                {
+                    "range": {
+                        "@timestamp": {
+                            "gte": time_after.strftime("%Y-%m-%dT%H:%M:%S.%f"),
+                        },
+                    },
+                },
+            ],
+        },
+    }
+    sort = [{"@timestamp": {"order": "desc"}}]
+    result = elastic_client.get_index_data(
+        query=query,
+        sort=sort,
+        size=1000,
+    )
+
+    assets = []
+    for asset in munchify(dict(result)).hits.hits:
+        assets.append(asset._source)
+
+    return assets
+
+
 def get_logs_from_stream(stream: str) -> list[Munch]:
     """
     This function converts logs stream to list of Munch objects (dictionaries)
```
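A usage sketch for the new helper, as it might appear in an asset inventory test; the `elastic_client` fixture, the import path, and the classification values below are assumptions for illustration:

```python
from datetime import datetime, timedelta

from commonlib.io_utils import get_assets_from_index  # assumed import path

def test_aws_assets_arrive(elastic_client):  # assumed fixture
    assets = get_assets_from_index(
        elastic_client=elastic_client,
        category="infrastructure",       # illustrative classification values
        sub_category="compute",
        type_="virtual-machine",
        sub_type="ec2-instance",
        time_after=datetime.utcnow() - timedelta(minutes=5),
    )
    assert assets, "expected at least one matching asset document"
```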
