diff --git a/.github/actions/cdr/action.yml b/.github/actions/cdr/action.yml
index 384f2d2cf5..0167335a69 100644
--- a/.github/actions/cdr/action.yml
+++ b/.github/actions/cdr/action.yml
@@ -65,6 +65,21 @@ inputs:
default: "default"
required: false
type: string
+ okta-logs-url:
+ description: "Okta System Log API URL (e.g. https://dev-xxx.okta.com/api/v1/logs); empty skips Okta"
+ default: "default"
+ required: false
+ type: string
+ okta-api-key:
+ description: "Okta API token for System Log"
+ default: "default"
+ required: false
+ type: string
+ okta-entity-analytics-domain:
+ description: "Okta org hostname for Entity Analytics (e.g. dev-123456.okta.com); empty skips that integration"
+ default: "default"
+ required: false
+ type: string
es-user:
description: "Elasticsearch user"
default: "elastic"
@@ -100,6 +115,11 @@ inputs:
default: "cloudbeat"
required: false
type: string
+ enable-entity-store-v2:
+ description: "When true, run Entity Store v2 installer; when false, run Entity Store v1 only"
+ default: true
+ required: false
+ type: boolean
runs:
using: composite
@@ -113,9 +133,21 @@ runs:
TF_VAR_gcp_service_account_json: ${{ inputs.gcp-service-account-json }}
TF_VAR_project: ${{ inputs.tag-project }}
TF_VAR_owner: ${{ inputs.tag-owner }}
+ # This workflow always provisions both Elastic Defend VMs; use Terraform variables for selective applies outside CI.
+ TF_VAR_deploy_aws_elastic_defend_linux: true
+ TF_VAR_deploy_aws_elastic_defend_windows: true
shell: bash
working-directory: "deploy/test-environments/cdr"
run: |
+ # Restrict WinRM (5985) ingress to the runner egress IP.
+ # The ec2-windows module rejects empty / 0.0.0.0/0 to avoid exposing WinRM to the internet by default.
+ RUNNER_EGRESS_IP="$(curl -fsS https://checkip.amazonaws.com | tr -d '\n')"
+ if [[ -z "$RUNNER_EGRESS_IP" ]]; then
+ echo "Failed to determine runner egress IP for WinRM allowlist"
+ exit 1
+ fi
+ export TF_VAR_windows_elastic_defend_winrm_ingress_cidr="${RUNNER_EGRESS_IP}/32"
+
terraform init
terraform validate
terraform apply -auto-approve
@@ -125,46 +157,7 @@ runs:
if: success()
shell: bash
working-directory: "deploy/test-environments/cdr"
- run: |
- aws_ec2_cloudtrail_public_ip=$(terraform output -raw ec2_cloudtrail_public_ip)
- echo "::add-mask::$aws_ec2_cloudtrail_public_ip"
- echo "aws-ec2-cloudtrail-public-ip=$aws_ec2_cloudtrail_public_ip" >> "$GITHUB_OUTPUT"
-
- aws_ec2_cloudtrail_key=$(terraform output -raw ec2_cloudtrail_key)
- echo "::add-mask::$aws_ec2_cloudtrail_key"
- echo "aws-ec2-cloudtrail-key=$aws_ec2_cloudtrail_key" >>"$GITHUB_OUTPUT"
-
- az_vm_activity_logs_public_ip=$(terraform output -raw az_vm_activity_logs_public_ip)
- echo "::add-mask::$az_vm_activity_logs_public_ip"
- echo "az-vm-activity-logs-public-ip=$az_vm_activity_logs_public_ip" >> "$GITHUB_OUTPUT"
-
- az_vm_activity_logs_key=$(terraform output -raw az_vm_activity_logs_key)
- echo "::add-mask::$az_vm_activity_logs_key"
- echo "az-vm-activity-logs-key=$az_vm_activity_logs_key" >> "$GITHUB_OUTPUT"
-
- gcp_audit_logs_public_ip=$(terraform output -raw gcp_audit_logs_public_ip)
- echo "::add-mask::$gcp_audit_logs_public_ip"
- echo "gcp-audit-logs-public-ip=$gcp_audit_logs_public_ip" >> "$GITHUB_OUTPUT"
-
- gcp_audit_logs_key=$(terraform output -raw gcp_audit_logs_key)
- echo "::add-mask::$gcp_audit_logs_key"
- echo "gcp-audit-logs-key=$gcp_audit_logs_key" >> "$GITHUB_OUTPUT"
-
- ec2_asset_inv_key=$(terraform output -raw ec2_asset_inventory_key)
- echo "::add-mask::$ec2_asset_inv_key"
- echo "ec2-asset-inv-key=$ec2_asset_inv_key" >> "$GITHUB_OUTPUT"
-
- asset_inv_public_ip=$(terraform output -raw ec2_asset_inventory_public_ip)
- echo "::add-mask::$asset_inv_public_ip"
- echo "asset-inv-public-ip=$asset_inv_public_ip" >> "$GITHUB_OUTPUT"
-
- ec2_wiz_key=$(terraform output -raw ec2_wiz_key)
- echo "::add-mask::$ec2_wiz_key"
- echo "ec2-wiz-key=$ec2_wiz_key" >> "$GITHUB_OUTPUT"
-
- ec2_wiz_public_ip=$(terraform output -raw ec2_wiz_public_ip)
- echo "::add-mask::$ec2_wiz_public_ip"
- echo "ec2-wiz-public-ip=$ec2_wiz_public_ip" >> "$GITHUB_OUTPUT"
+ run: ./export_terraform_outputs.sh
- name: Install AWS Cloudtrail integration
id: cloudtrail-integration
@@ -250,7 +243,7 @@ runs:
- name: Install WIZ integration
id: wiz-integration
- if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' }}
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' }}
working-directory: tests/integrations_setup
shell: bash
env:
@@ -264,8 +257,40 @@ runs:
run: |
poetry run python ./install_wiz_integration.py
+ - name: Install Okta integration (Fleet, Wiz agent policy)
+ id: okta-integration
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.wiz-integration.outcome == 'success' }}
+ working-directory: tests/integrations_setup
+ shell: bash
+ env:
+ OKTA_LOGS_URL: ${{ inputs.okta-logs-url }}
+ OKTA_API_KEY: ${{ inputs.okta-api-key }}
+ ES_USER: ${{ inputs.es-user }}
+ ES_PASSWORD: ${{ inputs.es-password }}
+ KIBANA_URL: ${{ inputs.kibana-url }}
+ STACK_VERSION: ${{ inputs.elk-stack-version }}
+ AGENT_VERSION: ${{ inputs.elk-stack-version }}
+ run: |
+ poetry run python ./install_okta_integration.py
+
+ - name: Install Okta Entity Analytics (Fleet, Wiz agent policy)
+ id: okta-entityanalytics-integration
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.wiz-integration.outcome == 'success' }}
+ working-directory: tests/integrations_setup
+ shell: bash
+ env:
+ OKTA_ENTITY_ANALYTICS_DOMAIN: ${{ inputs.okta-entity-analytics-domain }}
+ OKTA_API_KEY: ${{ inputs.okta-api-key }}
+ ES_USER: ${{ inputs.es-user }}
+ ES_PASSWORD: ${{ inputs.es-password }}
+ KIBANA_URL: ${{ inputs.kibana-url }}
+ STACK_VERSION: ${{ inputs.elk-stack-version }}
+ AGENT_VERSION: ${{ inputs.elk-stack-version }}
+ run: |
+ poetry run python ./install_entityanalytics_okta_integration.py
+
- name: Deploy WIZ agent
- if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.wiz-integration.outcome == 'success' }}
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.wiz-integration.outcome == 'success' }}
working-directory: deploy/test-environments/cdr
shell: bash
env:
@@ -277,6 +302,118 @@ runs:
cmd="chmod +x $scriptname && ./$scriptname"
../remote_setup.sh -k "$WIZ_KEY" -s "$src" -h "$WIZ_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
+ - name: Install Elastic Defend (Fleet)
+ id: elastic-defend-fleet
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' }}
+ working-directory: tests/integrations_setup
+ shell: bash
+ env:
+ ES_USER: ${{ inputs.es-user }}
+ ES_PASSWORD: ${{ inputs.es-password }}
+ KIBANA_URL: ${{ inputs.kibana-url }}
+ STACK_VERSION: ${{ inputs.elk-stack-version }}
+ AGENT_VERSION: ${{ inputs.elk-stack-version }}
+ ELASTIC_DEFEND_ENROLL_LINUX: true
+ ELASTIC_DEFEND_ENROLL_WINDOWS: true
+ ELASTIC_DEFEND_LINUX_PUBLIC_IP: ${{ steps.generate-data.outputs.elastic-defend-linux-public-ip }}
+ ELASTIC_DEFEND_WINDOWS_PUBLIC_IP: ${{ steps.generate-data.outputs.elastic-defend-windows-public-ip }}
+ ELASTIC_DEFEND_WINDOWS_INSTANCE_ID: ${{ steps.generate-data.outputs.elastic-defend-windows-instance-id }}
+ run: |
+ poetry run python ./install_elastic_defend_integration.py
+
+ - name: Deploy Elastic Defend agent (Linux)
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.elastic-defend-fleet.outcome == 'success' && steps.generate-data.outputs.elastic-defend-linux-public-ip != '' }}
+ working-directory: deploy/test-environments/cdr
+ shell: bash
+ env:
+ DEFEND_KEY: ${{ steps.generate-data.outputs.elastic-defend-linux-key }}
+ DEFEND_PUBLIC_IP: ${{ steps.generate-data.outputs.elastic-defend-linux-public-ip }}
+ run: |
+ scriptname="elastic-defend-linux.sh"
+ src="../../../tests/integrations_setup/$scriptname"
+ cmd="chmod +x $scriptname && ./$scriptname"
+ ../remote_setup.sh -k "$DEFEND_KEY" -s "$src" -h "$DEFEND_PUBLIC_IP" -d "~/$scriptname" -c "$cmd"
+
+ # PEM path in Terraform output is relative to deploy/test-environments/cdr (same as terraform apply).
+ - name: Prepare Windows Defend credentials (WinRM)
+ id: windows-defend-credentials
+ if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' && steps.elastic-defend-fleet.outcome == 'success' && steps.generate-data.outputs.elastic-defend-windows-public-ip != '' }}
+ working-directory: deploy/test-environments/cdr
+ shell: bash
+ env:
+ AWS_DEFAULT_REGION: ${{ inputs.aws-region }}
+ DEFEND_WIN_KEY: ${{ steps.generate-data.outputs.elastic-defend-windows-key }}
+ DEFEND_WIN_INSTANCE: ${{ steps.generate-data.outputs.elastic-defend-windows-instance-id }}
+ DEFEND_WIN_IP: ${{ steps.generate-data.outputs.elastic-defend-windows-public-ip }}
+ WINDOWS_DEFEND_CREDENTIALS_FILE: "${{ github.workspace }}/tests/integrations_setup/.windows-defend-connection.json"
+ run: |
+ set -euo pipefail
+ echo "Waiting on ${DEFEND_WIN_IP}:5985 ..."
+ PORT_OK=""
+ for i in $(seq 1 36); do
+ if timeout 5 bash -c "echo > /dev/tcp/${DEFEND_WIN_IP}/5985" 2>/dev/null; then
+ PORT_OK=1
+ echo "Port is open"
+ break
+ fi
+ echo "attempt $i/36: sleeping 10s"
+ sleep 10
+ done
+ if [ -z "$PORT_OK" ]; then
+ echo "Timed out waiting for TCP 5985 on ${DEFEND_WIN_IP} after ~6 minutes (host may still be mid-reboot from user_data bcdedit step)"
+ exit 1
+ fi
+ if [ ! -f "$DEFEND_WIN_KEY" ]; then
+ echo "Windows PEM not found at DEFEND_WIN_KEY=$DEFEND_WIN_KEY (cwd=$(pwd); resolve paths from deploy/test-environments/cdr)"
+ exit 1
+ fi
+ PASSWORD=""
+ for j in $(seq 1 30); do
+ PASSWORD="$(aws ec2 get-password-data --region "$AWS_DEFAULT_REGION" --instance-id "$DEFEND_WIN_INSTANCE" --priv-launch-key "$DEFEND_WIN_KEY" --query PasswordData --output text || true)"
+ if [ -n "$PASSWORD" ] && [ "$PASSWORD" != "None" ]; then
+ break
+ fi
+ echo "Waiting for password ($j/30)..."
+ sleep 10
+ done
+ if [ -z "$PASSWORD" ] || [ "$PASSWORD" = "None" ]; then
+ echo "Failed to retrieve password"
+ exit 1
+ fi
+ echo "::add-mask::$PASSWORD"
+ export DEFEND_ADMIN_PASSWORD="$PASSWORD"
+ python3 <<'PY'
+ import json
+ import os
+ import pathlib
+
+ pathlib.Path(os.environ["WINDOWS_DEFEND_CREDENTIALS_FILE"]).write_text(
+ json.dumps(
+ {
+ "instance_id": os.environ["DEFEND_WIN_INSTANCE"],
+ "public_ip": os.environ["DEFEND_WIN_IP"],
+ "winrm_port": 5985,
+ "winrm_use_ssl": False,
+ "username": "Administrator",
+ "password": os.environ["DEFEND_ADMIN_PASSWORD"],
+ },
+ indent=2,
+ ),
+ encoding="utf-8",
+ )
+ PY
+
+ - name: Enroll Elastic Defend agent (Windows WinRM)
+ if: ${{ !cancelled() && steps.windows-defend-credentials.outcome == 'success' }}
+ working-directory: tests/integrations_setup
+ shell: bash
+ env:
+ ELASTIC_DEFEND_WINDOWS_PS1: "${{ github.workspace }}/tests/integrations_setup/elastic-defend-windows.ps1"
+ WINDOWS_DEFEND_CREDENTIALS_FILE: "${{ github.workspace }}/tests/integrations_setup/.windows-defend-connection.json"
+ run: |
+ set -euo pipefail
+ poetry run python ./enroll_elastic_defend_winrm.py
+
- name: Check Asset Inventory supported version
id: asset-inventory-version-check
if: ${{ !cancelled() && steps.deploy-cdr-infra.outcome == 'success' }}
@@ -293,7 +430,7 @@ runs:
echo "asset_inventory_supported=false" >> $GITHUB_OUTPUT
fi
- - name: Enable Entity Store
+ - name: Enable Entity Store (v1 or v2)
if: ${{ !cancelled() && steps.asset-inventory-version-check.outputs.asset_inventory_supported == 'true' }}
working-directory: tests/integrations_setup
shell: bash
@@ -302,8 +439,16 @@ runs:
ES_PASSWORD: ${{ inputs.es-password }}
KIBANA_URL: ${{ inputs.kibana-url }}
STACK_VERSION: ${{ inputs.elk-stack-version }}
+ ENABLE_ENTITY_STORE_V2: ${{ inputs.enable-entity-store-v2 }}
run: |
- poetry run python ./enable_entity_store.py
+ set -euo pipefail
+ if [[ "${ENABLE_ENTITY_STORE_V2}" == "true" ]]; then
+ # Entity Store v2 prerequisites + install + maintainers + v2 status poll
+ poetry run python ./enable_entity_store_v2.py
+ else
+ # Entity Store v1 prerequisites + enable + v1 status poll (includes data view creation)
+ poetry run python ./enable_entity_store.py
+ fi
- name: Install Azure Asset Inventory integration
id: azure-asset-inventory-integration
@@ -413,5 +558,8 @@ runs:
EC2_ASSET_INV_KEY: ${{ steps.generate-data.outputs.ec2-asset-inv-key }}
EC2_WIZ_KEY: ${{ steps.generate-data.outputs.ec2-wiz-key }}
INTEGRATIONS_SETUP_DIR: "../../../tests/integrations_setup"
+ ELASTIC_DEFEND_LINUX_KEY: ${{ steps.generate-data.outputs.elastic-defend-linux-key }}
+ ELASTIC_DEFEND_WINDOWS_KEY: ${{ steps.generate-data.outputs.elastic-defend-windows-key }}
+ WINDOWS_DEFEND_CREDENTIALS_FILE: "${{ github.workspace }}/tests/integrations_setup/.windows-defend-connection.json"
run: |
./manage_infrastructure.sh "cdr" "upload-state"
diff --git a/.github/actions/elk-stack/action.yml b/.github/actions/elk-stack/action.yml
index 20c2e91aec..99ea30b425 100644
--- a/.github/actions/elk-stack/action.yml
+++ b/.github/actions/elk-stack/action.yml
@@ -29,6 +29,11 @@ inputs:
default: "latest"
type: string
required: false
+ kibana-security-solution-experimental:
+ description: "ESS only: set Kibana user_settings_yaml for Security Solution experimental flags"
+ type: boolean
+ default: true
+ required: false
docker-image-version-override:
description: "Optional Docker image version to override the default stack image. Accepts formats like 8.x.y, 8.x.y-hash, or 8.x.y-SNAPSHOT."
type: string
@@ -79,6 +84,7 @@ runs:
TF_VAR_ess_region: ${{ inputs.ess-region }}
TF_VAR_ec_url: ${{ inputs.ec-url }}
TF_VAR_pin_version: ${{ inputs.docker-image-version-override }}
+ TF_VAR_kibana_security_solution_experimental: ${{ inputs.kibana-security-solution-experimental }}
TF_VAR_ec_api_key: ${{ inputs.ec-api-key }}
TF_VAR_deployment_template: ${{ env.TF_VAR_deployment_template }}
TF_VAR_max_size: ${{ env.TF_VAR_max_size }}
diff --git a/.github/workflows/cdr-infra.yml b/.github/workflows/cdr-infra.yml
index c612b16edb..44a3cfb5dd 100644
--- a/.github/workflows/cdr-infra.yml
+++ b/.github/workflows/cdr-infra.yml
@@ -33,6 +33,16 @@ on:
description: "Number of days until environment expiration"
required: false
default: "5"
+ kibana_security_solution_experimental:
+ description: "ESS only: apply Kibana Security Solution experimental user settings (new home, watchlist)"
+ type: boolean
+ required: false
+ default: true
+ enable-entity-store-v2:
+ description: "CDR: when true run Entity Store v2; when false run Entity Store v1 only"
+ type: boolean
+ required: false
+ default: true
jobs:
init:
@@ -65,3 +75,5 @@ jobs:
serverless_mode: ${{ fromJSON(inputs.serverless_mode) }}
infra-type: ${{ needs.init.outputs.infra-type }}
expiration_days: ${{ inputs.expiration-days }}
+ kibana_security_solution_experimental: ${{ inputs.kibana_security_solution_experimental }}
+ enable-entity-store-v2: ${{ fromJSON(inputs.enable-entity-store-v2) }}
diff --git a/.github/workflows/test-environment.yml b/.github/workflows/test-environment.yml
index 023c17a706..9d2c2bd4c7 100644
--- a/.github/workflows/test-environment.yml
+++ b/.github/workflows/test-environment.yml
@@ -37,6 +37,11 @@ on:
required: false
description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)"
type: string
+ kibana_security_solution_experimental:
+ description: "ESS only: apply Kibana user_settings_yaml for Security Solution experimental flags (new home, watchlist)"
+ type: boolean
+ required: false
+ default: true
run-sanity-tests:
description: "Run sanity tests after provision"
default: false
@@ -84,6 +89,16 @@ on:
required: false
description: "Provide the full Docker image path to override the default image (e.g. for testing BC/SNAPSHOT)"
type: string
+ kibana_security_solution_experimental:
+ description: "ESS only: apply Kibana user_settings_yaml for Security Solution experimental flags (new home, watchlist)"
+ type: boolean
+ required: false
+ default: true
+ enable-entity-store-v2:
+ description: "CDR: when true run Entity Store v2; when false run Entity Store v1 only"
+ type: boolean
+ required: false
+ default: true
run-sanity-tests:
description: "Run sanity tests after provision"
default: false
@@ -395,6 +410,7 @@ jobs:
ec-api-key: ${{ env.TF_VAR_ec_api_key }}
ec-url: ${{ env.EC_URL }}
docker-image-version-override: ${{ env.TF_VAR_pin_version }}
+ kibana-security-solution-experimental: ${{ inputs.kibana_security_solution_experimental }}
env-s3-bucket: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
tag-project: ${{ github.actor }}
tag-owner: ${{ github.actor }}
@@ -479,11 +495,15 @@ jobs:
wiz-client-secret: ${{ secrets.WIZ_CLIENT_SECRET }}
wiz-endpoint-url: ${{ secrets.WIZ_ENDPOINT_URL }}
wiz-token-url: ${{ secrets.WIZ_TOKEN_URL }}
+ okta-logs-url: ${{ secrets.OKTA_LOGS_URL }}
+ okta-api-key: ${{ secrets.OKTA_API_KEY }}
+ okta-entity-analytics-domain: ${{ secrets.OKTA_ENTITY_ANALYTICS_DOMAIN }}
env-s3-bucket: "${{ env.S3_BASE_BUCKET }}/${{ env.DEPLOYMENT_NAME }}_${{ env.TF_STATE_FOLDER }}"
es-user: ${{ env.ES_USER }}
es-password: ${{ env.ES_PASSWORD }}
kibana-url: ${{ env.KIBANA_URL }}
elk-stack-version: ${{ env.STACK_VERSION }}
+ enable-entity-store-v2: ${{ inputs.enable-entity-store-v2 }}
azure-tags: ${{ env.AZURE_DEFAULT_TAGS }}
tag-project: ${{ github.actor }}
tag-owner: ${{ github.actor }}
diff --git a/.gitignore b/.gitignore
index d77d6995e6..cedb9ebf28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,7 +33,11 @@ test-logs/*.log
tests/allure/results/*
tests/allure/reports/*
tests/integrations_setup/state_data.json
+tests/integrations_setup/cdr_wiz_agent_policy.json
tests/integrations_setup/*.sh
+tests/integrations_setup/*.ps1
+tests/integrations_setup/elastic_defend_hosts.json
+tests/integrations_setup/.windows-defend-connection.json
# vscode
.vscode*
diff --git a/deploy/test-environments/cdr/export_terraform_outputs.sh b/deploy/test-environments/cdr/export_terraform_outputs.sh
new file mode 100755
index 0000000000..d85d6ec04f
--- /dev/null
+++ b/deploy/test-environments/cdr/export_terraform_outputs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Writes CDR Terraform outputs to GITHUB_OUTPUT (null-safe via terraform output -json).
+# Used by the CDR composite action when some modules are disabled (e.g. Wiz-only or full CDR).
+set -euo pipefail
+
+: "${GITHUB_OUTPUT:?GITHUB_OUTPUT must be set}"
+
+TF_OUT=$(terraform output -json)
+
+# Append "name=value" to GITHUB_OUTPUT; mask non-empty values in logs.
+out_masked() {
+ local out_name="$1" tf_key="$2"
+ local val
+ val=$(echo "$TF_OUT" | jq -r --arg k "$tf_key" '.[$k].value // empty')
+ if [ -n "$val" ]; then
+ echo "::add-mask::$val"
+ fi
+ printf '%s=%s\n' "$out_name" "$val" >>"$GITHUB_OUTPUT"
+}
+
+out_masked aws-ec2-cloudtrail-public-ip ec2_cloudtrail_public_ip
+out_masked aws-ec2-cloudtrail-key ec2_cloudtrail_key
+out_masked az-vm-activity-logs-public-ip az_vm_activity_logs_public_ip
+out_masked az-vm-activity-logs-key az_vm_activity_logs_key
+out_masked gcp-audit-logs-public-ip gcp_audit_logs_public_ip
+out_masked gcp-audit-logs-key gcp_audit_logs_key
+out_masked ec2-asset-inv-key ec2_asset_inventory_key
+out_masked asset-inv-public-ip ec2_asset_inventory_public_ip
+out_masked ec2-wiz-key ec2_wiz_key
+out_masked ec2-wiz-public-ip ec2_wiz_public_ip
+out_masked elastic-defend-linux-public-ip ec2_elastic_defend_linux_public_ip
+out_masked elastic-defend-linux-key ec2_elastic_defend_linux_key
+out_masked elastic-defend-windows-public-ip ec2_elastic_defend_windows_public_ip
+out_masked elastic-defend-windows-key ec2_elastic_defend_windows_key
+out_masked elastic-defend-windows-instance-id ec2_elastic_defend_windows_instance_id
diff --git a/deploy/test-environments/cdr/main.tf b/deploy/test-environments/cdr/main.tf
index 2c2fa65e51..f99328e693 100644
--- a/deploy/test-environments/cdr/main.tf
+++ b/deploy/test-environments/cdr/main.tf
@@ -80,4 +80,26 @@ module "aws_ec2_for_asset_inventory" {
deployment_name = "${var.deployment_name}-${random_string.suffix.result}"
specific_tags = merge(local.common_tags, { "ec2_type" : "asset_inventory" })
}
+
+module "aws_ec2_elastic_defend_linux" {
+ count = var.deploy_aws_elastic_defend_linux ? 1 : 0
+ source = "../modules/aws/ec2"
+ providers = { aws : aws }
+ aws_ami = var.ami_map[var.region]
+ deploy_k8s = false
+ deploy_agent = false
+ deployment_name = "${var.deployment_name}-${random_string.suffix.result}"
+ specific_tags = merge(local.common_tags, { "ec2_type" : "elastic_defend_linux" })
+}
+
+module "aws_ec2_elastic_defend_windows" {
+ count = var.deploy_aws_elastic_defend_windows ? 1 : 0
+ source = "../modules/aws/ec2-windows"
+ providers = { aws : aws }
+ deployment_name = "${var.deployment_name}-${random_string.suffix.result}"
+ specific_tags = merge(local.common_tags, { "ec2_type" : "elastic_defend_windows" })
+ windows_ami_id = var.windows_elastic_defend_ami_id
+ aws_ec2_instance_type = var.windows_elastic_defend_instance_type
+ winrm_ingress_cidr = var.windows_elastic_defend_winrm_ingress_cidr
+}
# ===== End Of CDR Infrastructure Resources =====
diff --git a/deploy/test-environments/cdr/output.tf b/deploy/test-environments/cdr/output.tf
index 61b532367c..fd03457df4 100644
--- a/deploy/test-environments/cdr/output.tf
+++ b/deploy/test-environments/cdr/output.tf
@@ -77,3 +77,28 @@ output "ec2_wiz_key" {
value = var.deploy_aws_ec2_wiz ? module.aws_ec2_for_wiz[0].ec2_ssh_key : null
sensitive = true
}
+
+output "ec2_elastic_defend_linux_public_ip" {
+ value = var.deploy_aws_elastic_defend_linux ? module.aws_ec2_elastic_defend_linux[0].aws_instance_cloudbeat_public_ip : null
+ sensitive = true
+}
+
+output "ec2_elastic_defend_linux_key" {
+ value = var.deploy_aws_elastic_defend_linux ? module.aws_ec2_elastic_defend_linux[0].ec2_ssh_key : null
+ sensitive = true
+}
+
+output "ec2_elastic_defend_windows_public_ip" {
+ value = var.deploy_aws_elastic_defend_windows ? module.aws_ec2_elastic_defend_windows[0].aws_instance_public_ip : null
+ sensitive = true
+}
+
+output "ec2_elastic_defend_windows_key" {
+ value = var.deploy_aws_elastic_defend_windows ? module.aws_ec2_elastic_defend_windows[0].ec2_ssh_key : null
+ sensitive = true
+}
+
+output "ec2_elastic_defend_windows_instance_id" {
+ value = var.deploy_aws_elastic_defend_windows ? module.aws_ec2_elastic_defend_windows[0].instance_id : null
+ sensitive = true
+}
diff --git a/deploy/test-environments/cdr/terraform.tf b/deploy/test-environments/cdr/terraform.tf
index d551f7454c..8a22f97752 100644
--- a/deploy/test-environments/cdr/terraform.tf
+++ b/deploy/test-environments/cdr/terraform.tf
@@ -20,6 +20,16 @@ terraform {
version = "~> 3.5.1"
}
+ tls = {
+ source = "hashicorp/tls"
+ version = "~> 4.0"
+ }
+
+ local = {
+ source = "hashicorp/local"
+ version = "~> 2.4"
+ }
+
}
required_version = ">= 1.3, <2.0.0"
diff --git a/deploy/test-environments/cdr/variables.tf b/deploy/test-environments/cdr/variables.tf
index f58b001840..18d0d11230 100644
--- a/deploy/test-environments/cdr/variables.tf
+++ b/deploy/test-environments/cdr/variables.tf
@@ -72,6 +72,36 @@ variable "deploy_aws_asset_inventory" {
default = true
}
+variable "deploy_aws_elastic_defend_linux" {
+ description = "Deploy Ubuntu EC2 for Elastic Defend. Selective for local/terraform applies (-var / .tfvars). The CDR GitHub composite always forces true."
+ type = bool
+ default = true
+}
+
+variable "deploy_aws_elastic_defend_windows" {
+ description = "Deploy Windows EC2 for Elastic Defend. Selective for local/terraform applies (-var / .tfvars). The CDR GitHub composite always forces true."
+ type = bool
+ default = true
+}
+
+variable "windows_elastic_defend_ami_id" {
+ description = "Optional Windows AMI override for Elastic Defend host. When empty, the latest Amazon Windows Server 2022 Base image for the region is used."
+ type = string
+ default = ""
+}
+
+variable "windows_elastic_defend_instance_type" {
+ description = "Instance type for Elastic Defend Windows VM"
+ type = string
+ default = "t3.large"
+}
+
+variable "windows_elastic_defend_winrm_ingress_cidr" {
+ description = "Source CIDR for WinRM HTTP (5985) on the Elastic Defend Windows host"
+ type = string
+ default = ""
+}
+
# ========= Cloud Tags ========================
variable "division" {
default = "engineering"
diff --git a/deploy/test-environments/elk-stack/main.tf b/deploy/test-environments/elk-stack/main.tf
index c4ca3f0a5b..a4ddb1ed8d 100644
--- a/deploy/test-environments/elk-stack/main.tf
+++ b/deploy/test-environments/elk-stack/main.tf
@@ -65,6 +65,8 @@ module "ec_deployment" {
"kibana" = "",
"apm" = ""
}
+
+ kibana_enable_security_solution_experimental = var.kibana_security_solution_experimental
}
module "ec_project" {
diff --git a/deploy/test-environments/elk-stack/variables.tf b/deploy/test-environments/elk-stack/variables.tf
index 5f4769912c..352a4c2f14 100644
--- a/deploy/test-environments/elk-stack/variables.tf
+++ b/deploy/test-environments/elk-stack/variables.tf
@@ -34,6 +34,12 @@ variable "serverless_mode" {
type = bool
}
+variable "kibana_security_solution_experimental" {
+ default = true
+ description = "When true (ESS only), apply Kibana user_settings_yaml for Security Solution experimental flags"
+ type = bool
+}
+
variable "deployment_template" {
default = "gcp-general-purpose"
description = "Optional deployment template. Defaults to the CPU optimized template for GCP"
diff --git a/deploy/test-environments/modules/aws/ec2-windows/main.tf b/deploy/test-environments/modules/aws/ec2-windows/main.tf
new file mode 100644
index 0000000000..762d4d6ecd
--- /dev/null
+++ b/deploy/test-environments/modules/aws/ec2-windows/main.tf
@@ -0,0 +1,110 @@
+locals {
+ windows_private_key_file = "${path.module}/cloudbeat-win-${random_id.id.hex}.pem"
+ tags = merge({
+ id = random_id.id.hex
+ provisioner = "terraform"
+ Name = var.deployment_name
+ }, var.specific_tags)
+}
+
+resource "random_id" "id" {
+ byte_length = 4
+}
+
+resource "tls_private_key" "cloudbeat_key" {
+ algorithm = "RSA"
+ rsa_bits = 4096
+}
+
+resource "aws_key_pair" "generated_key" {
+ provider = aws
+ key_name = "cloudbeat-win-${random_id.id.hex}"
+ public_key = tls_private_key.cloudbeat_key.public_key_openssh
+ tags = local.tags
+}
+
+data "aws_ami" "windows_2022" {
+ count = var.windows_ami_id == "" ? 1 : 0
+ most_recent = true
+ owners = ["801119661308"]
+
+ filter {
+ name = "name"
+ values = ["Windows_Server-2022-English-Full-Base-*"]
+ }
+
+ filter {
+ name = "virtualization-type"
+ values = ["hvm"]
+ }
+}
+
+resource "aws_security_group" "windows" {
+ provider = aws
+
+ egress {
+ from_port = 0
+ to_port = 0
+ protocol = "-1"
+ cidr_blocks = ["0.0.0.0/0"]
+ }
+
+ ingress {
+ description = "WinRM HTTP"
+ from_port = 5985
+ to_port = 5985
+ protocol = "tcp"
+ cidr_blocks = [var.winrm_ingress_cidr]
+ }
+
+ tags = local.tags
+}
+
+resource "local_file" "cloud_pem" {
+ filename = local.windows_private_key_file
+ content = tls_private_key.cloudbeat_key.private_key_pem
+ file_permission = "0400"
+}
+
+resource "aws_instance" "windows" {
+ provider = aws
+ ami = var.windows_ami_id != "" ? var.windows_ami_id : data.aws_ami.windows_2022[0].id
+ instance_type = var.aws_ec2_instance_type
+ key_name = aws_key_pair.generated_key.key_name
+
+ vpc_security_group_ids = [aws_security_group.windows.id]
+ associate_public_ip_address = true
+ iam_instance_profile = var.iam_instance_profile
+ get_password_data = true
+
+ user_data = <<-EOT
+ <powershell>
+ $ErrorActionPreference = "Continue"
+ try {
+ Get-NetConnectionProfile | Where-Object { $_.IPv4Connectivity -eq "Internet" } | Set-NetConnectionProfile -NetworkCategory Private -ErrorAction SilentlyContinue
+ } catch {}
+ winrm quickconfig -q -force
+ Enable-PSRemoting -Force -SkipNetworkProfileCheck
+ Set-Item WSMan:\localhost\Service\AllowUnencrypted $true
+ Set-Item WSMan:\localhost\Service\Auth\Basic $true
+ Set-Item WSMan:\localhost\Client\AllowUnencrypted $true
+ Set-Item WSMan:\localhost\Client\Auth\Basic $true
+ netsh advfirewall firewall set rule group="Windows Remote Management" new enable=yes 2>$null
+ Get-NetFirewallRule -DisplayGroup "Windows Remote Management" -ErrorAction SilentlyContinue | Where-Object { $_.Direction -eq "Inbound" } | Enable-NetFirewallRule -ErrorAction SilentlyContinue
+ New-NetFirewallRule -DisplayName "WinRM-5985-CDR" -Direction Inbound -Protocol TCP -LocalPort 5985 -Action Allow -ErrorAction SilentlyContinue
+ Set-Service WinRM -StartupType Automatic
+ Restart-Service WinRM
+
+ # Test-signed drivers / relaxed integrity — takes effect after reboot below (isolated test VMs only).
+ bcdedit /set testsigning on
+ if ($LASTEXITCODE -ne 0) { throw "bcdedit testsigning failed (exit $LASTEXITCODE)" }
+ bcdedit /set nointegritychecks on
+ if ($LASTEXITCODE -ne 0) { throw "bcdedit nointegritychecks failed (exit $LASTEXITCODE)" }
+ shutdown /r /t 30 /c "bcdedit applied for Elastic Defend test host"
+ </powershell>
+ EOT
+
+ tags = local.tags
+
+ depends_on = [local_file.cloud_pem]
+}
diff --git a/deploy/test-environments/modules/aws/ec2-windows/output.tf b/deploy/test-environments/modules/aws/ec2-windows/output.tf
new file mode 100644
index 0000000000..05dc0a0daf
--- /dev/null
+++ b/deploy/test-environments/modules/aws/ec2-windows/output.tf
@@ -0,0 +1,14 @@
+output "aws_instance_public_ip" {
+ description = "Windows EC2 public IP"
+ value = aws_instance.windows.public_ip
+}
+
+output "ec2_ssh_key" {
+ description = "Path to PEM for the generated instance key pair"
+ value = local.windows_private_key_file
+}
+
+output "instance_id" {
+ description = "EC2 instance id (for get-password-data)"
+ value = aws_instance.windows.id
+}
diff --git a/deploy/test-environments/modules/aws/ec2-windows/terraform.tf b/deploy/test-environments/modules/aws/ec2-windows/terraform.tf
new file mode 100644
index 0000000000..da090adbc6
--- /dev/null
+++ b/deploy/test-environments/modules/aws/ec2-windows/terraform.tf
@@ -0,0 +1,20 @@
+terraform {
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "~> 4.67.0"
+ }
+ tls = {
+ source = "hashicorp/tls"
+ version = "~> 4.0"
+ }
+ random = {
+ source = "hashicorp/random"
+ version = "~> 3.5.1"
+ }
+ local = {
+ source = "hashicorp/local"
+ version = "~> 2.4"
+ }
+ }
+}
diff --git a/deploy/test-environments/modules/aws/ec2-windows/variables.tf b/deploy/test-environments/modules/aws/ec2-windows/variables.tf
new file mode 100644
index 0000000000..ee362ab36d
--- /dev/null
+++ b/deploy/test-environments/modules/aws/ec2-windows/variables.tf
@@ -0,0 +1,39 @@
+variable "deployment_name" {
+ description = "EC2 instance name tag"
+ type = string
+}
+
+variable "specific_tags" {
+ description = "Additional tags for this deployment"
+ type = map(string)
+ default = {}
+}
+
+variable "aws_ec2_instance_type" {
+ description = "AWS instance type for Windows Server"
+ type = string
+ default = "t3.large"
+}
+
+variable "windows_ami_id" {
+ description = "Optional override for Windows Server AMI ID. When empty, the latest Amazon Windows Server 2022 Base image for the current region is used."
+ type = string
+ default = ""
+}
+
+variable "iam_instance_profile" {
+ description = "IAM instance profile name"
+ type = string
+ default = "ec2-role-with-security-audit"
+}
+
+variable "winrm_ingress_cidr" {
+ description = "Source CIDR for WinRM HTTP (5985)"
+ type = string
+ default = ""
+
+ validation {
+ condition = var.winrm_ingress_cidr != "" && var.winrm_ingress_cidr != "0.0.0.0/0"
+ error_message = "winrm_ingress_cidr must be set to a restrictive CIDR and must not be 0.0.0.0/0, as this would expose WinRM to the public internet."
+ }
+}
diff --git a/deploy/test-environments/modules/ec/main.tf b/deploy/test-environments/modules/ec/main.tf
index 2c3e9912da..91dce8f64e 100644
--- a/deploy/test-environments/modules/ec/main.tf
+++ b/deploy/test-environments/modules/ec/main.tf
@@ -9,6 +9,22 @@ locals {
kibana_docker_image_tag_override = lookup(var.docker_image_tag_override, "kibana", "")
apm_docker_image = lookup(var.docker_image, "apm", "")
apm_docker_image_tag_override = lookup(var.docker_image_tag_override, "apm", "")
+
+ security_solution_experimental_yaml = <<-EOT
+xpack.securitySolution.enableExperimental:
+ - entityAnalyticsNewHomePageEnabled
+ - entityAnalyticsWatchlistEnabled
+EOT
+
+ kibana_docker_config = local.kibana_docker_image_tag_override != "" ? {
+ docker_image = "${local.kibana_docker_image}:${local.kibana_docker_image_tag_override}"
+ } : {}
+
+ kibana_experimental_config = var.kibana_enable_security_solution_experimental ? {
+ user_settings_yaml = local.security_solution_experimental_yaml
+ } : {}
+
+ kibana_config_merged = merge(local.kibana_docker_config, local.kibana_experimental_config)
}
data "ec_stack" "deployment_version" {
@@ -53,9 +69,7 @@ resource "ec_deployment" "deployment" {
}
kibana = {
- config = local.kibana_docker_image_tag_override != "" ? {
- docker_image = "${local.kibana_docker_image}:${local.kibana_docker_image_tag_override}"
- } : null
+ config = length(local.kibana_config_merged) > 0 ? local.kibana_config_merged : null
}
integrations_server = {
diff --git a/deploy/test-environments/modules/ec/variables.tf b/deploy/test-environments/modules/ec/variables.tf
index abae9b32e6..2f8b193f8c 100644
--- a/deploy/test-environments/modules/ec/variables.tf
+++ b/deploy/test-environments/modules/ec/variables.tf
@@ -84,3 +84,9 @@ variable "docker_image" {
type = map(string)
description = "Optional docker image overrides. The full map needs to be specified"
}
+
+variable "kibana_enable_security_solution_experimental" {
+ type = bool
+ default = true
+ description = "When true, set Kibana user_settings_yaml with Security Solution experimental feature flags (ESS only)"
+}
diff --git a/deploy/test-environments/upload_state.sh b/deploy/test-environments/upload_state.sh
index a3d276bb4e..1e31f0fbd8 100755
--- a/deploy/test-environments/upload_state.sh
+++ b/deploy/test-environments/upload_state.sh
@@ -16,12 +16,32 @@ upload_cis() {
# Function to upload additional keys for CDR
upload_cdr() {
aws s3 cp "./terraform.tfstate" "${S3_BUCKET}/cdr-terraform.tfstate"
- aws s3 cp "${CLOUDTRAIL_KEY}" "${S3_BUCKET}/cloudtrail.pem"
- aws s3 cp "${ACTIVITY_LOGS_KEY}" "${S3_BUCKET}/az_activity_logs.pem"
- aws s3 cp "${AUDIT_LOGS_KEY}" "${S3_BUCKET}/gcp_audit_logs.pem"
- aws s3 cp "${EC2_ASSET_INV_KEY}" "${S3_BUCKET}/asset_inv.pem"
- aws s3 cp "${EC2_WIZ_KEY}" "${S3_BUCKET}/wiz.pem"
+ if [ -n "${CLOUDTRAIL_KEY:-}" ] && [ -f "${CLOUDTRAIL_KEY}" ]; then
+ aws s3 cp "${CLOUDTRAIL_KEY}" "${S3_BUCKET}/cloudtrail.pem"
+ fi
+ if [ -n "${ACTIVITY_LOGS_KEY:-}" ] && [ -f "${ACTIVITY_LOGS_KEY}" ]; then
+ aws s3 cp "${ACTIVITY_LOGS_KEY}" "${S3_BUCKET}/az_activity_logs.pem"
+ fi
+ if [ -n "${AUDIT_LOGS_KEY:-}" ] && [ -f "${AUDIT_LOGS_KEY}" ]; then
+ aws s3 cp "${AUDIT_LOGS_KEY}" "${S3_BUCKET}/gcp_audit_logs.pem"
+ fi
+ if [ -n "${EC2_ASSET_INV_KEY:-}" ] && [ -f "${EC2_ASSET_INV_KEY}" ]; then
+ aws s3 cp "${EC2_ASSET_INV_KEY}" "${S3_BUCKET}/asset_inv.pem"
+ fi
+ if [ -n "${EC2_WIZ_KEY:-}" ] && [ -f "${EC2_WIZ_KEY}" ]; then
+ aws s3 cp "${EC2_WIZ_KEY}" "${S3_BUCKET}/wiz.pem"
+ fi
aws s3 cp "${INTEGRATIONS_SETUP_DIR}/state_data.json" "$S3_BUCKET/state_data.json"
+
+ if [ -n "${ELASTIC_DEFEND_LINUX_KEY:-}" ] && [ -f "${ELASTIC_DEFEND_LINUX_KEY}" ]; then
+ aws s3 cp "${ELASTIC_DEFEND_LINUX_KEY}" "${S3_BUCKET}/elastic_defend_linux.pem"
+ fi
+ if [ -n "${ELASTIC_DEFEND_WINDOWS_KEY:-}" ] && [ -f "${ELASTIC_DEFEND_WINDOWS_KEY}" ]; then
+ aws s3 cp "${ELASTIC_DEFEND_WINDOWS_KEY}" "${S3_BUCKET}/elastic_defend_windows.pem"
+ fi
+ if [ -n "${WINDOWS_DEFEND_CREDENTIALS_FILE:-}" ] && [ -f "${WINDOWS_DEFEND_CREDENTIALS_FILE}" ]; then
+ aws s3 cp "${WINDOWS_DEFEND_CREDENTIALS_FILE}" "${S3_BUCKET}/windows-defend-connection.json"
+ fi
}
# Check for valid input
diff --git a/dev-docs/Cloud-Env-Testing.md b/dev-docs/Cloud-Env-Testing.md
index 8e81e01440..cc94130089 100644
--- a/dev-docs/Cloud-Env-Testing.md
+++ b/dev-docs/Cloud-Env-Testing.md
@@ -149,6 +149,29 @@ The [`Create Environment with Cloud Logs`](https://github.com/elastic/cloudbeat/
The workflow requires a subset of input parameters. All required inputs are described [here](#how-to-run-the-workflow).
+### How to run the workflow (manual)
+
+1. Go to `Actions → Create Environment with Cloud Logs (CDR)` and click **Run workflow**.
+2. Fill the inputs:
+ - **`deployment-name`** (**required**): lowercase, starts with a letter, max 20 chars. Example: `cdr-001`.
+ - **`elk-stack-version`** (**required**): stack version string. Examples:
+ - released: `8.16.0`
+ - snapshot: `8.16.0-SNAPSHOT`
+ - **`serverless_mode`**: if `true`, deploys a Serverless project instead of ESS. Default is `false`.
+ - **`docker-image-override`**: optional docker image override for agent installs (mostly for BC/SNAPSHOT testing).
+ - **`expiration-days`**: how long the environment should be kept before cleanup. Default is `5`.
+ - **`cis-infra`**: optional. When `true`, also deploy CIS infrastructure (`infra_type=all`). When `false`, CDR only (`infra_type=cdr`).
+ - **`kibana_security_solution_experimental`** (ESS only): when `true`, applies Kibana advanced setting YAML to enable Security Solution experimental UI flags (Entity Analytics home + watchlist). Default is `true`.
+ - **`enable-entity-store-v2`**: when `true`, the workflow installs **Entity Store v2** (v2 installer script). When `false`, it installs **Entity Store v1 only**. Default is `true`.
+
+3. Click **Run workflow** and wait for completion.
+
+#### What gets installed
+
+- **Infrastructure**: CDR VMs (AWS CloudTrail EC2, Azure activity logs VM, GCP audit logs VM, Wiz EC2, Asset Inventory EC2, Elastic Defend Linux + Windows, depending on the defaults of the `deploy_*` Terraform variables).
+- **Integrations / agents**: CloudTrail, Azure Activity Logs, GCP Audit Logs, Wiz, Okta (optional), Elastic Defend (Fleet), Asset Inventory (gated by stack version), and Entity Store (v1 or v2 based on the checkbox).
+
+
## Install Integrations Worfklow
The [`Install Integrations`](https://github.com/elastic/cloudbeat/actions/workflows/install-integrations.yml) GitHub workflow is used when the Elastic Stack is already installed, and the user wants to add `CIS` and/or `CDR` integrations.
diff --git a/tests/commonlib/framework/reporting.py b/tests/commonlib/framework/reporting.py
index 677c33660b..aeca6b4f09 100644
--- a/tests/commonlib/framework/reporting.py
+++ b/tests/commonlib/framework/reporting.py
@@ -44,7 +44,7 @@ def skip_param_case(
marks_list = [
pytest.mark.xfail(reason=data_to_report.skip_reason),
- allure.link(
+ allure.link( # pylint: disable=c-extension-no-member
url=data_to_report.url_link,
link_type=data_to_report.link_type,
name=data_to_report.url_title,
diff --git a/tests/fleet_api/base_call_api.py b/tests/fleet_api/base_call_api.py
index 42cd3e9889..9bf4c746b9 100644
--- a/tests/fleet_api/base_call_api.py
+++ b/tests/fleet_api/base_call_api.py
@@ -34,7 +34,15 @@ def __init__(self, status_code, response_text):
self.response_text = response_text
-def perform_api_call(method, url, return_json=True, headers=None, auth=None, params=None):
+def perform_api_call(
+ method,
+ url,
+ return_json=True,
+ headers=None,
+ auth=None,
+ params=None,
+ ok_statuses=None,
+):
"""
Perform an API call using the provided parameters.
@@ -50,12 +58,13 @@ def perform_api_call(method, url, return_json=True, headers=None, auth=None, par
Defaults to None.
params (dict, optional): The parameters to be included in the API request.
Defaults to None.
+ ok_statuses (tuple, optional): HTTP status codes treated as success. Defaults to (200,).
Returns:
- dict: The JSON response from the API call.
+ dict or bytes: Parsed JSON (empty dict for 204 or an empty body) when return_json is True; otherwise the raw response content.
Raises:
- APICallException: If the API call returns a non-200 status code.
+ APICallException: If the API call returns a non-success status code.
"""
if headers is None:
headers = {
@@ -66,13 +75,17 @@ def perform_api_call(method, url, return_json=True, headers=None, auth=None, par
auth = ()
if params is None:
params = {}
+ if ok_statuses is None:
+ ok_statuses = (200,)
response = requests.request(method=method, url=url, headers=headers, auth=auth, **params)
- if response.status_code != 200:
+ if response.status_code not in ok_statuses:
raise APICallException(response.status_code, response.text)
if not return_json:
return response.content
+ if response.status_code == 204 or not (response.content or b"").strip():
+ return {}
return response.json()
diff --git a/tests/fleet_api/data_view_api.py b/tests/fleet_api/data_view_api.py
index f46dfd0326..ff190623bc 100644
--- a/tests/fleet_api/data_view_api.py
+++ b/tests/fleet_api/data_view_api.py
@@ -29,10 +29,13 @@ def create_security_default_data_view(cfg: Munch, name: str, namespace: str = "d
"""
data_view_id = f"{name}-{namespace}"
- # Check if data view already exists
- if data_view_exists(cfg, name, namespace):
+ try:
+ existing = get_data_view(cfg, name, namespace)
logger.info(f"Data view '{data_view_id}' already exists.")
- return get_data_view(cfg, name, namespace)
+ return existing
+ except APICallException as exc:
+ if exc.status_code != 404:
+ raise
# Data view doesn't exist, create it
logger.info(f"Data view '{data_view_id}' not found. Creating new data view.")
@@ -55,12 +58,17 @@ def create_security_default_data_view(cfg: Munch, name: str, namespace: str = "d
url=create_url,
auth=cfg.auth,
params={"json": payload},
+ ok_statuses=(200, 201),
)
logger.info(f"Data view '{data_view_id}' created successfully.")
return response
- except APICallException as e:
- logger.error(f"Failed to create data view '{data_view_id}': {e}")
- raise
+ except APICallException as exc:
+ # If the data view already exists (race / previous attempt), just fetch it.
+ if exc.status_code == 409:
+ logger.info(f"Data view '{data_view_id}' already exists (409). Fetching.")
+ return get_data_view(cfg, name, namespace)
+ logger.error(f"Failed to create data view '{data_view_id}': {exc}")
+ raise exc
def get_data_view(cfg: Munch, name: str, namespace: str = "default") -> dict:
@@ -88,9 +96,12 @@ def get_data_view(cfg: Munch, name: str, namespace: str = "default") -> dict:
)
logger.info(f"Retrieved data view '{data_view_id}' successfully.")
return response
- except APICallException as e:
- logger.error(f"Failed to get data view '{data_view_id}': {e}")
- raise
+ except APICallException as exc:
+ if exc.status_code == 404:
+ logger.info(f"Data view '{data_view_id}' not found (404).")
+ else:
+ logger.error(f"Failed to get data view '{data_view_id}': {exc}")
+ raise exc
def data_view_exists(cfg: Munch, name: str, namespace: str = "default") -> bool:
diff --git a/tests/fleet_api/endpoint_package_policy.py b/tests/fleet_api/endpoint_package_policy.py
new file mode 100644
index 0000000000..5b9ee4525a
--- /dev/null
+++ b/tests/fleet_api/endpoint_package_policy.py
@@ -0,0 +1,83 @@
+"""
+Helpers for Elastic Defend (endpoint) Fleet package policies: malware/ransomware detect mode updates.
+"""
+
+import copy
+import time
+from typing import Any, Dict
+
+from fleet_api.base_call_api import APICallException, perform_api_call
+from fleet_api.package_policy_api import get_package_policy_by_id
+from loguru import logger
+from munch import Munch
+
+READONLY_PACKAGE_POLICY_KEYS = frozenset(
+ {"id", "revision", "created_at", "created_by", "updated_at", "updated_by"},
+)
+
+
+def _package_policy_body_for_put(item: Dict[str, Any]) -> Dict[str, Any]:
+ body = copy.deepcopy(item)
+ for k in READONLY_PACKAGE_POLICY_KEYS:
+ body.pop(k, None)
+ return body
+
+
+def _apply_detect_on_os_policy(os_policy: Dict[str, Any]) -> None:
+ for feature in ("malware", "ransomware"):
+ block = os_policy.get(feature)
+ if isinstance(block, dict) and "mode" in block and block.get("mode") != "detect":
+ block["mode"] = "detect"
+
+
+def apply_endpoint_malware_ransomware_detect_modes(package_policy: Dict[str, Any]) -> None:
+ """
+ Set malware (and ransomware where present) to mode 'detect' for windows, linux, and mac sections
+ under Fleet endpoint integration inputs.
+ """
+ inputs = package_policy.get("inputs")
+ if not isinstance(inputs, list):
+ return
+ for inp in inputs:
+ cfg_block = inp.get("config") if isinstance(inp, dict) else None
+ if not isinstance(cfg_block, dict):
+ continue
+ pol_wrapped = cfg_block.get("policy")
+ if not isinstance(pol_wrapped, dict):
+ continue
+ policy_val = pol_wrapped.get("value")
+ if not isinstance(policy_val, dict):
+ continue
+ for os_key in ("windows", "linux", "mac"):
+ os_pol = policy_val.get(os_key)
+ if isinstance(os_pol, dict):
+ _apply_detect_on_os_policy(os_pol)
+
+
+def update_package_policy(cfg: Munch, package_policy_id: str, body: Dict[str, Any]) -> None:
+ """PUT the given package policy body to Fleet (Kibana package_policies API)."""
+ url = f"{cfg.kibana_url}/api/fleet/package_policies/{package_policy_id}"
+ try:
+ perform_api_call(
+ method="PUT",
+ url=url,
+ auth=cfg.auth,
+ params={"json": body},
+ )
+ logger.info(f"Package policy '{package_policy_id}' updated successfully")
+ except APICallException as api_ex:
+ logger.error(
+ f"API call failed, status code {api_ex.status_code}. Response: {api_ex.response_text}",
+ )
+ raise
+
+
+def enable_endpoint_malware_ransomware_detect(cfg: Munch, package_policy_id: str) -> None:
+ """Fetch package policy, set malware/ransomware to detect, and persist via update_package_policy."""
+ item = get_package_policy_by_id(cfg=cfg, policy_id=package_policy_id)
+ if not item:
+ raise ValueError(f"No package policy returned for id {package_policy_id}")
+ body = _package_policy_body_for_put(item)
+ apply_endpoint_malware_ransomware_detect_modes(body)
+ update_package_policy(cfg, package_policy_id, body)
+ time.sleep(5)
diff --git a/tests/fleet_api/entity_store_api.py b/tests/fleet_api/entity_store_api.py
index b88aaeef08..bbede511fe 100644
--- a/tests/fleet_api/entity_store_api.py
+++ b/tests/fleet_api/entity_store_api.py
@@ -2,6 +2,8 @@
This module contains API calls related to Entity Store interactions.
"""
+import time
+
from fleet_api.base_call_api import (
APICallException,
perform_api_call,
@@ -9,6 +11,30 @@
from loguru import logger
from munch import Munch
+# Matches ecp-synthetics-monitors/projects/entity-store/lib/common/kibana-api.ts
+_ENTITY_STORE_V2_INTERNAL_HEADERS = {
+ "Content-Type": "application/json",
+ "kbn-xsrf": "true",
+ "x-elastic-internal-origin": "kibana",
+}
+
+_ENTITY_STORE_V2_SETTING_KEY = "securitySolution:entityStoreEnableV2"
+_ENTITY_STORE_V2_POLL_TIMEOUT_SEC = 60
+_ENTITY_STORE_V2_POLL_INTERVAL_SEC = 5
+
+
+def _entity_store_v2_setting_user_value(cfg: Munch):
+ """Read userValue for the v2 feature flag from GET /internal/kibana/settings."""
+ url = f"{cfg.kibana_url}/internal/kibana/settings"
+ data = perform_api_call(
+ "GET",
+ url,
+ auth=cfg.auth,
+ headers=_ENTITY_STORE_V2_INTERNAL_HEADERS.copy(),
+ params={"params": {"query": _ENTITY_STORE_V2_SETTING_KEY}},
+ )
+ return data.get("settings", {}).get(_ENTITY_STORE_V2_SETTING_KEY, {}).get("userValue")
+
def enable_entity_store(cfg: Munch) -> dict:
"""Enables the entity store in Kibana.
@@ -60,6 +86,111 @@ def entity_store_status(cfg: Munch) -> dict:
raise api_ex
+def entity_store_status_v2(cfg: Munch) -> dict:
+ """Checks the status of Entity Store v2 using the internal API (apiVersion=2)."""
+ url = f"{cfg.kibana_url}/internal/security/entity_store/status"
+ try:
+ response = perform_api_call(
+ method="GET",
+ url=url,
+ auth=cfg.auth,
+ headers=_ENTITY_STORE_V2_INTERNAL_HEADERS.copy(),
+ params={"params": {"apiVersion": "2"}},
+ ok_statuses=(200, 201, 204),
+ )
+ logger.info("Entity Store v2 status retrieved successfully.")
+ return response
+ except APICallException as api_ex:
+ logger.error(
+ "Entity Store v2 status API call failed, status {}. Response: {}",
+ api_ex.status_code,
+ api_ex.response_text,
+ )
+ raise api_ex
+
+
+def enable_entity_store_v2(cfg: Munch) -> None:
+ """Turn on Entity Store v2 via internal settings and poll until active.
+
+ Same sequence as enableEntityStoreV2 in kibana-api.ts (POST then GET until userValue is true).
+ """
+ url = f"{cfg.kibana_url}/internal/kibana/settings"
+ try:
+ perform_api_call(
+ "POST",
+ url,
+ auth=cfg.auth,
+ headers=_ENTITY_STORE_V2_INTERNAL_HEADERS.copy(),
+ params={"json": {"changes": {_ENTITY_STORE_V2_SETTING_KEY: True}}},
+ )
+ logger.info("Entity Store v2 setting posted; waiting until active...")
+ deadline = time.time() + _ENTITY_STORE_V2_POLL_TIMEOUT_SEC
+ while time.time() < deadline:
+ if _entity_store_v2_setting_user_value(cfg) is True:
+ logger.info("Entity Store v2 feature flag is active.")
+ return
+ time.sleep(_ENTITY_STORE_V2_POLL_INTERVAL_SEC)
+ except APICallException as api_ex:
+ logger.error(
+ "enable_entity_store_v2 failed, status {}. Response: {}",
+ api_ex.status_code,
+ api_ex.response_text,
+ )
+ raise
+ raise TimeoutError(
+ f"Entity Store v2 setting not active within {_ENTITY_STORE_V2_POLL_TIMEOUT_SEC}s",
+ )
+
+
+def install_entity_store_v2(cfg: Munch) -> dict:
+ """Install Entity Store v2 (POST /internal/security/entity_store/install?apiVersion=2, empty body).
+
+ Same as installEntityStoreV2 in kibana-api.ts.
+ """
+ url = f"{cfg.kibana_url}/internal/security/entity_store/install"
+ try:
+ result = perform_api_call(
+ "POST",
+ url,
+ auth=cfg.auth,
+ headers=_ENTITY_STORE_V2_INTERNAL_HEADERS.copy(),
+ params={"json": {}, "params": {"apiVersion": "2"}},
+ ok_statuses=(200, 201, 204),
+ )
+ logger.info("Entity Store v2 install completed.")
+ return result
+ except APICallException as api_ex:
+ logger.error(
+ "install_entity_store_v2 failed, status {}. Response: {}",
+ api_ex.status_code,
+ api_ex.response_text,
+ )
+ raise api_ex
+
+
+def init_entity_store_v2_maintainers(cfg: Munch) -> dict:
+ """Initialize Entity Store v2 maintainers (internal API, apiVersion=2 query param)."""
+ url = f"{cfg.kibana_url}/internal/security/entity_store/entity_maintainers/init"
+ try:
+ result = perform_api_call(
+ "POST",
+ url,
+ auth=cfg.auth,
+ headers=_ENTITY_STORE_V2_INTERNAL_HEADERS.copy(),
+ params={"json": {}, "params": {"apiVersion": "2"}},
+ ok_statuses=(200, 201, 204),
+ )
+ logger.info("Entity Store v2 maintainers init completed.")
+ return result
+ except APICallException as api_ex:
+ logger.error(
+ "init_entity_store_v2_maintainers failed, status {}. Response: {}",
+ api_ex.status_code,
+ api_ex.response_text,
+ )
+ raise api_ex
+
+
def is_entity_store_fully_started(cfg: Munch) -> bool:
"""Checks if the entity store is fully started (status is running and all engines are started)."""
status_response = entity_store_status(cfg)
@@ -76,3 +207,25 @@ def is_entity_store_fully_started(cfg: Munch) -> bool:
logger.info(f"Engine {engine.get('type')} is started.")
logger.info("Entity store is fully started.")
return True
+
+
+def is_entity_store_v2_fully_started(cfg: Munch) -> bool:
+ """Checks if Entity Store v2 is fully started (internal v2 status is running and all engines started)."""
+ status_response = entity_store_status_v2(cfg)
+ global_status = status_response.get("status")
+ engines = status_response.get("engines", [])
+ if global_status != "running":
+ logger.info("Entity Store v2 global status is: '{}'", global_status)
+ return False
+ logger.info("====== Entity Store v2 Engines Status ====")
+ for engine in engines:
+ if engine.get("status") != "started":
+ logger.error(
+ "Entity Store v2 engine {} status is not started: {}",
+ engine.get("type"),
+ engine.get("status"),
+ )
+ return False
+ logger.info("Entity Store v2 engine {} is started.", engine.get("type"))
+ logger.info("Entity Store v2 is fully started.")
+ return True
diff --git a/tests/fleet_api/utils.py b/tests/fleet_api/utils.py
index dc7f2f0f15..b3be6f57cd 100644
--- a/tests/fleet_api/utils.py
+++ b/tests/fleet_api/utils.py
@@ -3,6 +3,7 @@
Functions:
- read_json(json_path: Path) -> dict: Read JSON data from a file.
+- write_json(json_path: Path, data: dict, indent: int = 2) -> None: Write a dict to a JSON file (UTF-8).
- save_state(file_path: Path, data: list) -> None: Save data to a JSON file.
- delete_file(file_path: Path): Delete a file.
@@ -47,6 +48,18 @@ def read_json(json_path: Path) -> dict:
sys.exit(1)
+def write_json(json_path: Path, data: dict, indent: int = 2) -> None:
+ """
+ Write a dictionary to a JSON file using UTF-8.
+
+ Args:
+ json_path: Output file path.
+ data: Serializable dict.
+ indent: Pretty-print indentation (default 2).
+ """
+ json_path.write_text(json.dumps(data, indent=indent), encoding="utf-8")
+
+
def delete_file(file_path: Path):
"""
Delete a file.
diff --git a/tests/integrations_setup/cdr_wiz_fleet_helpers.py b/tests/integrations_setup/cdr_wiz_fleet_helpers.py
new file mode 100644
index 0000000000..03c06f3172
--- /dev/null
+++ b/tests/integrations_setup/cdr_wiz_fleet_helpers.py
@@ -0,0 +1,47 @@
+"""Shared helpers for Fleet package policies on the CDR Wiz agent policy."""
+
+import re
+import sys
+from pathlib import Path
+
+import configuration_fleet as cnfg
+from fleet_api.common_api import get_package_version
+from fleet_api.utils import read_json
+from loguru import logger
+
+# GA stacks look like 8.16.0; SNAPSHOT and BC builds add a hyphen suffix (e.g. 9.4.0-SNAPSHOT, 9.4.0-sdfjh).
+_EPM_PRERELEASE_STACK_VERSION = re.compile(r"^\d+\.\d+\.\d+-.+")
+
+
+def stack_version_uses_epm_prerelease(stack_version: str) -> bool:
+ """Whether get_package_version should pass prerelease=True (SNAPSHOT or BC hash suffix)."""
+ v = (stack_version or "").strip()
+ if not v:
+ return False
+ if "SNAPSHOT" in v.upper():
+ return True
+ return bool(_EPM_PRERELEASE_STACK_VERSION.match(v))
+
+
+def cdr_wiz_agent_policy_id(integrations_setup_dir: Path) -> str:
+ """Load agent_policy_id from cdr_wiz_agent_policy.json; exit if missing."""
+ wiz_context = read_json(integrations_setup_dir / "cdr_wiz_agent_policy.json")
+ agent_policy_id = (wiz_context.get("agent_policy_id") or "").strip()
+ if not agent_policy_id:
+ logger.error("cdr_wiz_agent_policy.json has no agent_policy_id")
+ sys.exit(1)
+ return agent_policy_id
+
+
+def fleet_epm_package_version(package_name: str, missing_log_message: str) -> str:
+ """Resolve package version from Fleet / EPM; exit if not found."""
+ prerelease = stack_version_uses_epm_prerelease(cnfg.elk_config.stack_version or "")
+ package_version = get_package_version(
+ cfg=cnfg.elk_config,
+ package_name=package_name,
+ prerelease=prerelease,
+ )
+ if not package_version:
+ logger.error(missing_log_message)
+ sys.exit(1)
+ return package_version
diff --git a/tests/integrations_setup/configuration_fleet.py b/tests/integrations_setup/configuration_fleet.py
index 27b74eadfe..98b9b1a8c0 100644
--- a/tests/integrations_setup/configuration_fleet.py
+++ b/tests/integrations_setup/configuration_fleet.py
@@ -87,3 +87,9 @@
wiz_config.client_secret = os.getenv("WIZ_CLIENT_SECRET", "")
wiz_config.url = os.getenv("WIZ_URL", "")
wiz_config.token_url = os.getenv("WIZ_TOKEN_URL", "")
+
+# Okta integration (CDR, same agent policy as Wiz)
+okta_config = Munch()
+okta_config.url = os.getenv("OKTA_LOGS_URL", "")
+okta_config.api_key = os.getenv("OKTA_API_KEY", "")
+okta_config.entity_analytics_domain = os.getenv("OKTA_ENTITY_ANALYTICS_DOMAIN", "")
diff --git a/tests/integrations_setup/data/agent-policy-elastic-defend.json b/tests/integrations_setup/data/agent-policy-elastic-defend.json
new file mode 100644
index 0000000000..0cf6af145d
--- /dev/null
+++ b/tests/integrations_setup/data/agent-policy-elastic-defend.json
@@ -0,0 +1,5 @@
+{
+ "name": "tf-ap-elastic-defend",
+ "namespace": "default",
+ "monitoring_enabled": ["logs", "metrics"]
+}
diff --git a/tests/integrations_setup/data/elastic-defend-linux.j2 b/tests/integrations_setup/data/elastic-defend-linux.j2
new file mode 100644
index 0000000000..400199cbca
--- /dev/null
+++ b/tests/integrations_setup/data/elastic-defend-linux.j2
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -euo pipefail
+
+curl -L -O {{ artifacts_url }}/elastic-agent-{{ agent_version }}-linux-x86_64.tar.gz
+tar xzvf elastic-agent-{{ agent_version }}-linux-x86_64.tar.gz
+cd elastic-agent-{{ agent_version }}-linux-x86_64
+sudo ./elastic-agent install -f{% if install_servers %} {{ install_servers }}{% endif %} --url={{ fleet_url }} --enrollment-token={{ enrollment_token }}
diff --git a/tests/integrations_setup/data/elastic-defend-windows.j2 b/tests/integrations_setup/data/elastic-defend-windows.j2
new file mode 100644
index 0000000000..50c49f1cda
--- /dev/null
+++ b/tests/integrations_setup/data/elastic-defend-windows.j2
@@ -0,0 +1,7 @@
+$ErrorActionPreference = "Stop"
+$ProgressPreference = "SilentlyContinue"
+Invoke-WebRequest -Uri "{{ artifacts_url }}/elastic-agent-{{ agent_version }}-windows-x86_64.zip" -OutFile "elastic-agent.zip"
+Expand-Archive -Path "elastic-agent.zip" -DestinationPath "." -Force
+Set-Location "elastic-agent-{{ agent_version }}-windows-x86_64"
+$installArgs = @("install", "-f"{% if install_servers %}, "{{ install_servers }}"{% endif %}, "--url={{ fleet_url }}", "--enrollment-token={{ enrollment_token }}")
+& .\elastic-agent.exe @installArgs
diff --git a/tests/integrations_setup/data/entityanalytics_okta-pkg.json b/tests/integrations_setup/data/entityanalytics_okta-pkg.json
new file mode 100644
index 0000000000..286dc8461d
--- /dev/null
+++ b/tests/integrations_setup/data/entityanalytics_okta-pkg.json
@@ -0,0 +1,43 @@
+{
+ "policy_ids": [
+ ""
+ ],
+ "package": {
+ "name": "entityanalytics_okta",
+ "version": ""
+ },
+ "name": "",
+ "description": "",
+ "namespace": "default",
+ "inputs": {
+ "entity-entity-analytics": {
+ "enabled": true,
+ "streams": {
+ "entityanalytics_okta.entity": {
+ "enabled": true,
+ "vars": {
+ "okta_domain": "",
+ "okta_token": "",
+ "okta_scopes": [
+ "okta.users.read",
+ "okta.devices.read"
+ ],
+ "dataset": "all",
+ "enrich_user_roles": true,
+ "sync_interval": "24h",
+ "update_interval": "15m",
+ "http_client_timeout": "30s",
+ "ssl": "",
+ "enable_request_tracer": false,
+ "tags": [
+ "forwarded",
+ "entityanalytics_okta-entity"
+ ],
+ "preserve_original_event": false,
+ "preserve_duplicate_custom_fields": false
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/tests/integrations_setup/data/okta-pkg.json b/tests/integrations_setup/data/okta-pkg.json
new file mode 100644
index 0000000000..ddee05dd74
--- /dev/null
+++ b/tests/integrations_setup/data/okta-pkg.json
@@ -0,0 +1,41 @@
+{
+ "policy_ids": [
+ ""
+ ],
+ "package": {
+ "name": "okta",
+ "version": ""
+ },
+ "name": "",
+ "description": "",
+ "namespace": "default",
+ "inputs": {
+ "okta-httpjson": {
+ "enabled": true,
+ "vars": {
+ "interval": "60s",
+ "initial_interval": "24h",
+ "url": "",
+ "api_key": "",
+ "okta_scopes": [
+ "okta.logs.read"
+ ],
+ "enable_request_tracer": false
+ },
+ "streams": {
+ "okta.system": {
+ "enabled": true,
+ "vars": {
+ "tags": [
+ "forwarded",
+ "okta-system"
+ ],
+ "preserve_original_event": false,
+ "disable_keep_alive": false,
+ "limit": 1000
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/tests/integrations_setup/data/package-policy-elastic-defend.json b/tests/integrations_setup/data/package-policy-elastic-defend.json
new file mode 100644
index 0000000000..7340857e81
--- /dev/null
+++ b/tests/integrations_setup/data/package-policy-elastic-defend.json
@@ -0,0 +1,28 @@
+{
+ "policy_id": "",
+ "name": "pkg-plcy-elastic-defend",
+ "description": "Elastic Defend for CDR test hosts",
+ "namespace": "default",
+ "enabled": true,
+ "package": {
+ "name": "endpoint",
+ "version": "0.0.0"
+ },
+ "inputs": [
+ {
+ "enabled": true,
+ "streams": [],
+ "type": "ENDPOINT_INTEGRATION_CONFIG",
+ "config": {
+ "_config": {
+ "value": {
+ "type": "endpoint",
+ "endpointConfig": {
+ "preset": "EDRComplete"
+ }
+ }
+ }
+ }
+ }
+ ]
+}
diff --git a/tests/integrations_setup/enable_entity_store_v2.py b/tests/integrations_setup/enable_entity_store_v2.py
new file mode 100644
index 0000000000..df55ef7986
--- /dev/null
+++ b/tests/integrations_setup/enable_entity_store_v2.py
@@ -0,0 +1,56 @@
+"""
+Enable Entity Store v2 on Kibana (v2-only).
+
+Uses the same three-step flow as ecp-synthetics-monitors kibana-api.ts:
+internal settings (entityStoreEnableV2), install, then maintainers init;
+then polls public entity store status until running or timeout.
+
+Requires:
+ - configuration_fleet / elk_config with Kibana URL and auth.
+"""
+
+import sys
+import time
+
+import configuration_fleet as config_fleet
+import requests
+from fleet_api.entity_store_api import (
+ enable_entity_store_v2,
+ init_entity_store_v2_maintainers,
+ install_entity_store_v2,
+ is_entity_store_v2_fully_started,
+)
+from loguru import logger
+
+elk_config = config_fleet.elk_config
+ENTITY_STORE_INIT_TIMEOUT = 180 # seconds
+
+if __name__ == "__main__":
+ try:
+ enable_entity_store_v2(cfg=elk_config)
+ install_entity_store_v2(cfg=elk_config)
+ init_entity_store_v2_maintainers(cfg=elk_config)
+
+ start_time = time.time()
+ logger.info("====== Entity Store v2 status poll ====")
+ while time.time() - start_time < ENTITY_STORE_INIT_TIMEOUT:
+ if is_entity_store_v2_fully_started(elk_config):
+ logger.info("Entity store is fully started after v2 install.")
+ break
+ time.sleep(1)
+ else:
+ logger.error(
+ "Entity store did not fully start within {} seconds after v2 install.",
+ ENTITY_STORE_INIT_TIMEOUT,
+ )
+ sys.exit(1)
+
+ except TimeoutError as exc:
+ logger.error("Entity Store v2 setup timed out: {}", exc)
+ sys.exit(1)
+ except requests.RequestException as exc:
+ logger.error("HTTP error while enabling entity store v2: {}", exc)
+ sys.exit(1)
+ except (ValueError, KeyError) as exc:
+ logger.error("Configuration error while enabling entity store v2: {}", exc)
+ sys.exit(1)
diff --git a/tests/integrations_setup/enroll_elastic_defend_winrm.py b/tests/integrations_setup/enroll_elastic_defend_winrm.py
new file mode 100644
index 0000000000..772abfa02d
--- /dev/null
+++ b/tests/integrations_setup/enroll_elastic_defend_winrm.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+"""
+Run elastic-defend-windows.ps1 on a Windows host over WinRM (HTTP, basic auth).
+Credentials JSON path: WINDOWS_DEFEND_CREDENTIALS_FILE (see CDR composite action).
+"""
+
+import json
+import os
+import time
+from pathlib import Path
+from typing import Optional
+
+from loguru import logger
+
+try:
+ import winrm
+except ImportError as exc:
+ logger.error("pywinrm is required: poetry install (see tests/pyproject.toml)")
+ raise SystemExit(1) from exc
+
+
+def _load_creds(path: Path) -> dict:
+ with path.open(encoding="utf-8") as f:
+ return json.load(f)
+
+
+def main() -> None:
+ """Run elastic-defend-windows.ps1 on the Windows host via WinRM until success or retries exhausted."""
+ cred_path = os.getenv("WINDOWS_DEFEND_CREDENTIALS_FILE", "").strip()
+ ps1_path = os.getenv("ELASTIC_DEFEND_WINDOWS_PS1", "").strip()
+ if not cred_path or not ps1_path:
+ logger.error("WINDOWS_DEFEND_CREDENTIALS_FILE and ELASTIC_DEFEND_WINDOWS_PS1 must be set")
+ raise SystemExit(1)
+
+ creds = _load_creds(Path(cred_path))
+ host = creds["public_ip"]
+ port = int(creds.get("winrm_port", 5985))
+ use_ssl = bool(creds.get("winrm_use_ssl", False))
+ username = creds.get("username", "Administrator")
+ password = creds.get("password", "")
+ if not password:
+ logger.error("Password missing in credentials file")
+ raise SystemExit(1)
+
+ script = Path(ps1_path).read_text(encoding="utf-8")
+
+ max_attempts = int(os.getenv("ELASTIC_DEFEND_WINRM_RETRIES", "36"))
+ delay_s = int(os.getenv("ELASTIC_DEFEND_WINRM_RETRY_DELAY", "10"))
+
+ transport = "ssl" if use_ssl else "plaintext"
+ target = f"http://{host}:{port}/wsman" if not use_ssl else f"https://{host}:{port}/wsman"
+ session = winrm.Session(
+ target,
+ auth=(username, password),
+ transport=transport,
+ server_cert_validation="ignore",
+ )
+
+ last_err: Optional[Exception] = None
+ for attempt in range(1, max_attempts + 1):
+ try:
+ logger.info(f"WinRM exec attempt {attempt}/{max_attempts} on {target}")
+ result = session.run_ps(script)
+ if result.status_code != 0:
+ logger.error(result.std_err.decode("utf-8", errors="replace"))
+ raise RuntimeError(f"PowerShell exited with status {result.status_code}")
+ logger.info(result.std_out.decode("utf-8", errors="replace"))
+ return
+ except Exception as exc: # pylint: disable=broad-exception-caught
+ last_err = exc
+ logger.warning(f"WinRM attempt failed: {exc}")
+ time.sleep(delay_s)
+
+ logger.error(f"WinRM enrollment failed after {max_attempts} attempts: {last_err}")
+ raise SystemExit(1) from last_err
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/integrations_setup/install_elastic_defend_integration.py b/tests/integrations_setup/install_elastic_defend_integration.py
new file mode 100644
index 0000000000..9979cf7265
--- /dev/null
+++ b/tests/integrations_setup/install_elastic_defend_integration.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+"""
+Fleet setup for Elastic Defend (endpoint) on CDR: one agent policy, endpoint integration,
+malware/ransomware detect modes, and Linux/Windows install artifacts.
+"""
+
+import json
+import os
+from pathlib import Path
+from typing import Dict, Tuple
+
+import configuration_fleet as cnfg
+from cdr_wiz_fleet_helpers import stack_version_uses_epm_prerelease
+from fleet_api.agent_policy_api import create_agent_policy
+from fleet_api.common_api import (
+ get_artifact_server,
+ get_enrollment_token,
+ get_fleet_server_host,
+ get_package_version,
+ update_package_version,
+)
+from fleet_api.endpoint_package_policy import enable_endpoint_malware_ransomware_detect
+from fleet_api.package_policy_api import create_integration
+from fleet_api.utils import get_install_servers_option, read_json, render_template
+from loguru import logger
+from munch import Munch
+from state_file_manager import HostType, PolicyState, state_manager
+
+AGENT_POLICY_JSON = "data/agent-policy-elastic-defend.json"
+PACKAGE_POLICY_JSON = "data/package-policy-elastic-defend.json"
+LINUX_TEMPLATE = "data/elastic-defend-linux.j2"
+WINDOWS_TEMPLATE = "data/elastic-defend-windows.j2"
+INTEGRATION_LABEL = "ELASTIC_DEFEND_CDR"
+
+
+def _truthy_env(name: str) -> bool:
+ return os.getenv(name, "false").lower() in ("1", "true", "yes")
+
+
+def _expected_enrolled_agents() -> int:
+ n = 0
+ if _truthy_env("ELASTIC_DEFEND_ENROLL_LINUX"):
+ n += 1
+ if _truthy_env("ELASTIC_DEFEND_ENROLL_WINDOWS"):
+ n += 1
+ return n
+
+
+def _agent_version() -> str:
+ v = (cnfg.elk_config.agent_version or "").strip()
+ if v:
+ return v
+ return (cnfg.elk_config.stack_version or "").strip()
+
+
+def load_policies() -> Tuple[Dict, Dict]:
+ """Load agent and package policy JSON from this package's data directory."""
+ policies_dir = Path(__file__).parent
+ agent_policy = read_json(policies_dir / AGENT_POLICY_JSON)
+ package_policy = read_json(policies_dir / PACKAGE_POLICY_JSON)
+ return agent_policy, package_policy
+
+
+def _write_hosts_metadata(
+ agt_policy_id: str,
+ pkg_policy_id: str,
+) -> None:
+ meta = {
+ "agent_policy_id": agt_policy_id,
+ "package_policy_id": pkg_policy_id,
+ "integration": INTEGRATION_LABEL,
+ "elastic_defend_linux_public_ip": os.getenv("ELASTIC_DEFEND_LINUX_PUBLIC_IP", ""),
+ "elastic_defend_windows_public_ip": os.getenv("ELASTIC_DEFEND_WINDOWS_PUBLIC_IP", ""),
+ "elastic_defend_windows_instance_id": os.getenv("ELASTIC_DEFEND_WINDOWS_INSTANCE_ID", ""),
+ }
+ out = Path(__file__).parent / "elastic_defend_hosts.json"
+ out.write_text(json.dumps(meta, indent=2), encoding="utf-8")
+ logger.info(f"Wrote {out}")
+
+
+def main() -> None:
+ """Create Fleet policies and write Elastic Defend install artifacts for CDR."""
+ prerelease = stack_version_uses_epm_prerelease(cnfg.elk_config.stack_version or "")
+ package_version = get_package_version(
+ cfg=cnfg.elk_config,
+ package_name="endpoint",
+ prerelease=prerelease,
+ )
+ if not package_version:
+ logger.error("Could not resolve endpoint package version from Fleet")
+ raise SystemExit(1)
+ logger.info(f"Endpoint package version: {package_version}")
+
+ update_package_version(
+ cfg=cnfg.elk_config,
+ package_name="endpoint",
+ package_version=package_version,
+ )
+
+ agent_data, package_data = load_policies()
+ package_data["package"]["version"] = package_version
+
+ logger.info("Create Elastic Defend agent policy")
+ agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data)
+
+ logger.info("Create Elastic Defend integration")
+ package_policy_id = create_integration(
+ cfg=cnfg.elk_config,
+ pkg_policy=package_data,
+ agent_policy_id=agent_policy_id,
+ data={},
+ )
+
+ logger.info("Set endpoint malware/ransomware modes to detect")
+ enable_endpoint_malware_ransomware_detect(cfg=cnfg.elk_config, package_policy_id=package_policy_id)
+
+ enroll_expected = _expected_enrolled_agents()
+ if enroll_expected > 0:
+ state_manager.add_policy(
+ PolicyState(
+ agent_policy_id,
+ package_policy_id,
+ enroll_expected,
+ [],
+ HostType.LINUX_TAR.value,
+ agent_data["name"],
+ ),
+ )
+ else:
+ logger.info("Skipping state_manager entry (no enroll steps; expected agents = 0)")
+
+ _write_hosts_metadata(agent_policy_id, package_policy_id)
+
+ enrollment_token = get_enrollment_token(cfg=cnfg.elk_config, policy_id=agent_policy_id)
+ fleet_url = get_fleet_server_host(cfg=cnfg.elk_config)
+ agent_version = _agent_version()
+ artifacts_url = get_artifact_server(agent_version)
+ install_servers = get_install_servers_option(agent_version)
+
+ manifest_params = Munch(
+ enrollment_token=enrollment_token,
+ fleet_url=fleet_url,
+ agent_version=agent_version,
+ artifacts_url=artifacts_url,
+ )
+ if install_servers:
+ manifest_params.install_servers = install_servers
+
+ base = Path(__file__).parent
+ linux_rendered = render_template(base / LINUX_TEMPLATE, manifest_params.toDict())
+ (base / "elastic-defend-linux.sh").write_text(linux_rendered, encoding="utf-8")
+ logger.info("Wrote elastic-defend-linux.sh")
+
+ win_rendered = render_template(base / WINDOWS_TEMPLATE, manifest_params.toDict())
+ (base / "elastic-defend-windows.ps1").write_text(win_rendered, encoding="utf-8")
+ logger.info("Wrote elastic-defend-windows.ps1")
+
+ logger.info("Elastic Defend Fleet setup finished")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/integrations_setup/install_entityanalytics_okta_integration.py b/tests/integrations_setup/install_entityanalytics_okta_integration.py
new file mode 100644
index 0000000000..f01af98d8b
--- /dev/null
+++ b/tests/integrations_setup/install_entityanalytics_okta_integration.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+"""
+Fleet-only entityanalytics_okta package policy on the CDR Wiz agent policy (same EC2 host).
+Requires install_wiz_integration.py to have run first (cdr_wiz_agent_policy.json).
+Reuses OKTA_API_KEY as okta_token; OKTA_ENTITY_ANALYTICS_DOMAIN is the bare Okta org hostname (no scheme/path).
+"""
+import sys
+from pathlib import Path
+
+import configuration_fleet as cnfg
+from cdr_wiz_fleet_helpers import cdr_wiz_agent_policy_id, fleet_epm_package_version
+from fleet_api.package_policy_api import create_integration
+from fleet_api.utils import read_json, update_key_value
+from loguru import logger
+from package_policy import generate_random_name
+
+
+def _skip_entityanalytics_okta() -> bool:
+ """True when OKTA_ENTITY_ANALYTICS_DOMAIN is unset or left at the workflow default."""
+ domain = (cnfg.okta_config.entity_analytics_domain or "").strip()
+ return domain in ("", "default")
+
+
+def main() -> None:
+ """Create entityanalytics_okta package policy on the Wiz CDR agent policy, or skip if not configured."""
+ if _skip_entityanalytics_okta():
+ logger.info(
+ "OKTA_ENTITY_ANALYTICS_DOMAIN unset or default; skipping Okta Entity Analytics Fleet integration",
+ )
+ return
+
+ api_key = (cnfg.okta_config.api_key or "").strip()
+ if not api_key or api_key == "default":
+ logger.error("OKTA_API_KEY is required when OKTA_ENTITY_ANALYTICS_DOMAIN is set")
+ sys.exit(1)
+
+ base = Path(__file__).parent
+ agent_policy_id = cdr_wiz_agent_policy_id(base)
+ package_version = fleet_epm_package_version(
+ "entityanalytics_okta",
+ "Could not resolve entityanalytics_okta package version from Fleet",
+ )
+ logger.info(f"entityanalytics_okta package version: {package_version}")
+
+ package_data = read_json(base / "data/entityanalytics_okta-pkg.json")
+ package_data["name"] = generate_random_name("pkg-entityanalytics-okta-cdr")
+ package_data["package"]["version"] = package_version
+
+ entity_input = package_data["inputs"]["entity-entity-analytics"]
+ for key, value in (
+ ("okta_domain", cnfg.okta_config.entity_analytics_domain.strip()),
+ ("okta_token", cnfg.okta_config.api_key),
+ ):
+ update_key_value(data=entity_input, search_key=key, value_to_apply=value)
+
+ logger.info("Create Okta Entity Analytics integration on Wiz agent policy")
+ create_integration(
+ cfg=cnfg.elk_config,
+ pkg_policy=package_data,
+ agent_policy_id=agent_policy_id,
+ data={},
+ )
+ logger.info("Okta Entity Analytics Fleet integration finished")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/integrations_setup/install_okta_integration.py b/tests/integrations_setup/install_okta_integration.py
new file mode 100644
index 0000000000..4e2773f103
--- /dev/null
+++ b/tests/integrations_setup/install_okta_integration.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""
+Fleet-only Okta package policy on the CDR Wiz agent policy (same EC2 host).
+Requires install_wiz_integration.py to have run first (cdr_wiz_agent_policy.json).
+"""
+import sys
+from pathlib import Path
+
+import configuration_fleet as cnfg
+from cdr_wiz_fleet_helpers import cdr_wiz_agent_policy_id, fleet_epm_package_version
+from fleet_api.package_policy_api import create_integration
+from fleet_api.utils import read_json, update_key_value
+from loguru import logger
+from package_policy import generate_random_name
+
+
+def _skip_okta() -> bool:
+ """True when OKTA_LOGS_URL is unset or left at the workflow default."""
+ url = (cnfg.okta_config.url or "").strip()
+ return url in ("", "default")
+
+
+def main() -> None:
+    """Create Okta package policy on the Wiz CDR agent policy, or skip if not configured."""
+ if _skip_okta():
+ logger.info("OKTA_LOGS_URL unset or default; skipping Okta Fleet integration")
+ return
+
+ api_key = (cnfg.okta_config.api_key or "").strip()
+ if not api_key or api_key == "default":
+ logger.error("OKTA_API_KEY is required when OKTA_LOGS_URL is set")
+ sys.exit(1)
+
+ base = Path(__file__).parent
+ agent_policy_id = cdr_wiz_agent_policy_id(base)
+ package_version = fleet_epm_package_version(
+ "okta",
+ "Could not resolve okta package version from Fleet",
+ )
+ logger.info(f"Okta package version: {package_version}")
+
+ package_data = read_json(base / "data/okta-pkg.json")
+ package_data["name"] = generate_random_name("pkg-okta-cdr")
+ package_data["package"]["version"] = package_version
+
+ for key, value in (("url", cnfg.okta_config.url), ("api_key", cnfg.okta_config.api_key)):
+ update_key_value(
+ data=package_data["inputs"]["okta-httpjson"],
+ search_key=key,
+ value_to_apply=value,
+ )
+
+ logger.info("Create Okta integration on Wiz agent policy")
+ create_integration(
+ cfg=cnfg.elk_config,
+ pkg_policy=package_data,
+ agent_policy_id=agent_policy_id,
+ data={},
+ )
+ logger.info("Okta Fleet integration finished")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/integrations_setup/install_wiz_integration.py b/tests/integrations_setup/install_wiz_integration.py
index a1ed3e75b9..94302b3267 100755
--- a/tests/integrations_setup/install_wiz_integration.py
+++ b/tests/integrations_setup/install_wiz_integration.py
@@ -23,6 +23,7 @@
read_json,
render_template,
update_key_value,
+ write_json,
)
from loguru import logger
from munch import Munch
@@ -64,6 +65,10 @@
logger.info("Create agent policy")
agent_policy_id = create_agent_policy(cfg=cnfg.elk_config, json_policy=agent_data)
+ wiz_context_path = Path(__file__).parent / "cdr_wiz_agent_policy.json"
+ write_json(wiz_context_path, {"agent_policy_id": agent_policy_id})
+ logger.info(f"Wrote {wiz_context_path} for shared CDR integrations (e.g. Okta)")
+
logger.info(f"Create {INTEGRATION_NAME} integration")
package_policy_id = create_integration(
cfg=cnfg.elk_config,
diff --git a/tests/poetry.lock b/tests/poetry.lock
index d3d7bec589..92b2e85674 100644
--- a/tests/poetry.lock
+++ b/tests/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
[[package]]
name = "allure-pytest"
@@ -96,6 +96,104 @@ files = [
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
+[[package]]
+name = "cffi"
+version = "2.0.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\""
+files = [
+ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
+ {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
+ {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
+ {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
+ {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
+ {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
+ {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
+ {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
+ {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
+ {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
+ {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
+ {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
+ {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
+ {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
+ {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
+ {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
+ {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
+ {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
+ {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
+ {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
+ {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
+ {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
+ {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
+ {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
+ {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
+ {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
+ {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
+ {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
+ {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
+ {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
+ {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
+ {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
+ {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
+ {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
+ {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
+ {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
+ {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
+ {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
+ {file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
+ {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
+ {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
+ {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
+ {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
+ {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
+ {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
+ {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
+ {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
+ {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
+ {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
+ {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
+ {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
+ {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
+]
+
+[package.dependencies]
+pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
+
[[package]]
name = "charset-normalizer"
version = "3.3.2"
@@ -209,6 +307,191 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+[[package]]
+name = "cryptography"
+version = "43.0.3"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+markers = "python_version == \"3.9\""
+files = [
+ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
+ {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
+ {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
+ {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
+ {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
+ {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
+ {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
+ {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
+ {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
+ {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
+ {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
+ {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
+ {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
+ {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
+ {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
+nox = ["nox"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "cryptography"
+version = "45.0.7"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = "!=3.9.0,!=3.9.1,>=3.7"
+groups = ["main"]
+markers = "python_version == \"3.10\""
+files = [
+ {file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"},
+ {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"},
+ {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"},
+ {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"},
+ {file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"},
+ {file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"},
+ {file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"},
+ {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"},
+ {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"},
+ {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"},
+ {file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"},
+ {file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"},
+ {file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"},
+ {file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"},
+ {file = "cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
+pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+sdist = ["build (>=1.0.0)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "cryptography"
+version = "46.0.6"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = "!=3.9.0,!=3.9.1,>=3.8"
+groups = ["main"]
+markers = "python_version >= \"3.11\""
+files = [
+ {file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
+ {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
+ {file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
+ {file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
+ {file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
+ {file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
+ {file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
+ {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
+ {file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
+ {file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
+ {file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
+ {file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
+ {file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
+ {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
+ {file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
+ {file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
+ {file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
+ {file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
+ {file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
+ {file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
+]
+
+[package.dependencies]
+cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox[uv] (>=2024.4.15)"]
+pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
+sdist = ["build (>=1.0.0)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test-randomorder = ["pytest-randomly"]
+
[[package]]
name = "dill"
version = "0.3.8"
@@ -298,7 +581,7 @@ description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version <= \"3.10\""
+markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@@ -698,6 +981,32 @@ files = [
[package.dependencies]
pyasn1 = ">=0.4.6,<0.7.0"
+[[package]]
+name = "pycparser"
+version = "2.23"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\" and python_version == \"3.9\""
+files = [
+ {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"},
+ {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"},
+]
+
+[[package]]
+name = "pycparser"
+version = "3.0"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\" and python_version >= \"3.10\""
+files = [
+ {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"},
+ {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"},
+]
+
[[package]]
name = "pylint"
version = "2.17.7"
@@ -728,6 +1037,26 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""
spelling = ["pyenchant (>=3.2,<4.0)"]
testutils = ["gitpython (>3)"]
+[[package]]
+name = "pyspnego"
+version = "0.12.1"
+description = "Windows Negotiate Authentication Client and Server"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "pyspnego-0.12.1-py3-none-any.whl", hash = "sha256:7237cb47985ccf5da512106ddb2731e4f9cefec00991f76c054488eb95fb1a2d"},
+ {file = "pyspnego-0.12.1.tar.gz", hash = "sha256:ff4fb6df38202a012ea2a0f43091ae9680878443f0ea61c9ea0e2e8152a4b810"},
+]
+
+[package.dependencies]
+cryptography = "*"
+sspilib = {version = ">=0.3.0", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+kerberos = ["gssapi (>=1.6.0) ; sys_platform != \"win32\"", "krb5 (>=0.3.0) ; sys_platform != \"win32\""]
+yaml = ["ruamel.yaml"]
+
[[package]]
name = "pytest"
version = "7.4.4"
@@ -861,6 +1190,27 @@ files = [
{file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
]
+[[package]]
+name = "pywinrm"
+version = "0.5.0"
+description = "Python library for Windows Remote Management"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "pywinrm-0.5.0-py3-none-any.whl", hash = "sha256:c267046d281de613fc7c8a528cdd261564d9b99bdb7c2926221eff3263b700c8"},
+ {file = "pywinrm-0.5.0.tar.gz", hash = "sha256:5428eb1e494af7954546cd4ff15c9ef1a30a75e05b25a39fd606cef22201e9f1"},
+]
+
+[package.dependencies]
+requests = ">=2.9.1"
+requests-ntlm = ">=1.1.0"
+xmltodict = "*"
+
+[package.extras]
+credssp = ["requests-credssp (>=1.0.0)"]
+kerberos = ["pykerberos (>=1.2.1,<2.0.0) ; sys_platform != \"win32\"", "winkerberos (>=0.5.0) ; sys_platform == \"win32\""]
+
[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -946,6 +1296,23 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+[[package]]
+name = "requests-ntlm"
+version = "1.3.0"
+description = "This package allows for HTTP NTLM authentication using the requests library."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "requests_ntlm-1.3.0-py3-none-any.whl", hash = "sha256:4c7534a7d0e482bb0928531d621be4b2c74ace437e88c5a357ceb7452d25a510"},
+ {file = "requests_ntlm-1.3.0.tar.gz", hash = "sha256:b29cc2462623dffdf9b88c43e180ccb735b4007228a542220e882c58ae56c668"},
+]
+
+[package.dependencies]
+cryptography = ">=1.3"
+pyspnego = ">=0.4.0"
+requests = ">=2.0.0"
+
[[package]]
name = "requests-oauthlib"
version = "2.0.0"
@@ -987,7 +1354,7 @@ description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
groups = ["main"]
-markers = "python_version <= \"3.10\""
+markers = "python_version < \"3.11\""
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@@ -1109,6 +1476,46 @@ files = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
+[[package]]
+name = "sspilib"
+version = "0.5.0"
+description = "SSPI API bindings for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "sspilib-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9c64965de12f3eea242fd57045e546721929f62da65a3e5d629bee39314394d1"},
+ {file = "sspilib-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bab2f828bfb8750091f17a71cc753402ecc24544331570edeaa04b97e0f97193"},
+ {file = "sspilib-0.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:05f13740f16ffd14ef2d383fc2f9d376a414052263bc7595929019195dfc001a"},
+ {file = "sspilib-0.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:07efa91478c3e6de328fe65226172b8e4cf08976be859d4dbf03655be6b6c4ab"},
+ {file = "sspilib-0.5.0-cp310-cp310-win32.whl", hash = "sha256:8dab68e994d24a08f854d36ac96409b3b8cc03fdebc590925f76f9d733c3a902"},
+ {file = "sspilib-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9e1947df07110ee1861009fc117bd089a7710403f3f5c488fb52a6e00b7c5b84"},
+ {file = "sspilib-0.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:28d0eb944f7ff70bc99fe729d06fa230aec1649c5bc216809e359cd0c77d4840"},
+ {file = "sspilib-0.5.0-cp311-abi3-macosx_10_12_x86_64.whl", hash = "sha256:eaba0331997368ffbdedff5e95f4fec18b19c809637e8848a4c673130fb4dd9e"},
+ {file = "sspilib-0.5.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:3dbb80bfe0a17f272c68e9d86a0375d11f98157abe998272a540e635694c1540"},
+ {file = "sspilib-0.5.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4266eda17b81f50e71a3f75cdddf424d76046de68d8014d289a895ec008df4e4"},
+ {file = "sspilib-0.5.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:57ea0ce644339bb71ee3eb226b22a121d011b0aea5e2f32d078d88f4a269359b"},
+ {file = "sspilib-0.5.0-cp311-abi3-win32.whl", hash = "sha256:fcb57b41b3200ef2e6e8846e2a13799d20b35b796267f2f75cc65e3883e8eeb6"},
+ {file = "sspilib-0.5.0-cp311-abi3-win_amd64.whl", hash = "sha256:ca2a21a4e90db563c2cec639c66b3a29ea53129a0c55ff1e4154a02937f6bd45"},
+ {file = "sspilib-0.5.0-cp311-abi3-win_arm64.whl", hash = "sha256:6893bad16f122fc3c4bd908461b9728694465c05ca97c22f7e2094791c4ee3cb"},
+ {file = "sspilib-0.5.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10018d475022643d11b1aeef08e674d8a3f8b03a597ad31fa8c8c302a58ee960"},
+ {file = "sspilib-0.5.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2124dece22406b71294311bcef7e0e3fae88e85fee77039a96771f6972c98377"},
+ {file = "sspilib-0.5.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:963ca7d7158b19b46fcfd8c3bb5f94696ff6e9cf7b01911586717105f11336b0"},
+ {file = "sspilib-0.5.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:13d9fbe9a2e0df6405cac39a2a5a93f0f04c67b8e8d5e4c6cd27f8f76a16ce9c"},
+ {file = "sspilib-0.5.0-cp314-cp314t-win32.whl", hash = "sha256:9dad272abf3f4cf0bf95d495075d2987f6ba1fb300f8d603661ccac07d11272f"},
+ {file = "sspilib-0.5.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7d7724d5dbb31f68e62465863dfb862fe2793281ce40d0c8f2dc60c8f07998f2"},
+ {file = "sspilib-0.5.0-cp314-cp314t-win_arm64.whl", hash = "sha256:8ce23ec740dee025136370ed4ae64b7d1535368321049ef960012a57c93ebe15"},
+ {file = "sspilib-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cbf2c8a907fb51f0522a1360a7d9dc8b08fd0a6edf978ce6f3b96a0d95df6a60"},
+ {file = "sspilib-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9f40f896beecaa33530a55201c5c78f840ffc42aaeb9b0a41caaf0626508282"},
+ {file = "sspilib-0.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2b1d24be70dfe01733161c05862f6ab653d2f6d2e5bd52138f9dae9c91c23f2e"},
+ {file = "sspilib-0.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:0f1b75967135d065139b94095c5b5312eb9e3316e6f6a9b317310f6b16ea47ba"},
+ {file = "sspilib-0.5.0-cp39-cp39-win32.whl", hash = "sha256:ef3cbac859b2dfc50c9ee6e822bd0f65538196fe5c863f11a53ac13e6d08641c"},
+ {file = "sspilib-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a72f6c64e16e9f9ac7a0652579590de54b1c35d1304382d9a1195be1e1a3be6c"},
+ {file = "sspilib-0.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:f7a81176e0b59e68259c22f712ce3c411975b897dd32649f22a3aad41e621a21"},
+ {file = "sspilib-0.5.0.tar.gz", hash = "sha256:b62f7f2602aa1add0505eee2417e2df24421224cb411e53bf3ae42a71b62fe98"},
+]
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -1116,7 +1523,7 @@ description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version <= \"3.10\""
+markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
@@ -1141,7 +1548,7 @@ description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version <= \"3.10\""
+markers = "python_version < \"3.11\""
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -1278,7 +1685,22 @@ files = [
{file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
]
+[[package]]
+name = "xmltodict"
+version = "1.0.4"
+description = "Makes working with XML feel like you are working with JSON"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "xmltodict-1.0.4-py3-none-any.whl", hash = "sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a"},
+ {file = "xmltodict-1.0.4.tar.gz", hash = "sha256:6d94c9f834dd9e44514162799d344d815a3a4faec913717a9ecbfa5be1bb8e61"},
+]
+
+[package.extras]
+test = ["pytest", "pytest-cov"]
+
[metadata]
lock-version = "2.1"
python-versions = ">=3.9"
-content-hash = "3871359db6797870614461a732ec6aed89c4b5c5276adabb2ddb6bf952ec204e"
+content-hash = "3652536ffea4c8944feaf0ea0f300aed68c9da842335bce0a3a1161756c6e343"
diff --git a/tests/pyproject.toml b/tests/pyproject.toml
index 74f5847a59..7bd9166bd9 100644
--- a/tests/pyproject.toml
+++ b/tests/pyproject.toml
@@ -21,6 +21,7 @@ dependencies = [
"requests >= 2.32.2",
"ruamel-yaml >= 0.18.5",
"jinja2 >= 3.1.5",
+ "pywinrm (>=0.5.0,<0.6.0)",
]
[tool.poetry]