diff --git a/.circleci/epoch b/.circleci/epoch index dc9b6ff568c7..1b0be9cf0e99 100644 --- a/.circleci/epoch +++ b/.circleci/epoch @@ -1,2 +1,2 @@ # increment this to force a full build -26 +28 diff --git a/.circleci/main/commands/executions/run-build.yml b/.circleci/main/commands/executions/run-build.yml index 3a0bf0471c17..d7c91c2dd18e 100644 --- a/.circleci/main/commands/executions/run-build.yml +++ b/.circleci/main/commands/executions/run-build.yml @@ -47,7 +47,7 @@ commands: ;; esac echo "export OPENNMS_VERSION=\"$OPENNMS_VERSION\"" >> $BASH_ENV - ./compile.pl -DskipTests=true -Dbuild.skip.tarball=false \ + ./compile.pl -DskipTests=true -DskipITs=true -Dbuild.skip.tarball=false \ -Daether.connector.resumeDownloads=false \ -Daether.connector.basic.threads=1 \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ diff --git a/.circleci/main/commands/executions/run-integration-tests.yml b/.circleci/main/commands/executions/run-integration-tests.yml index 2ce093e17efe..59560eb966be 100644 --- a/.circleci/main/commands/executions/run-integration-tests.yml +++ b/.circleci/main/commands/executions/run-integration-tests.yml @@ -57,12 +57,31 @@ commands: (date '+%T' || :) > ~/test-results/system-logs/docker_stats 2>&1 (docker stats --all --no-stream || :) >> ~/test-results/system-logs/docker_stats 2>&1 - (docker ps ---all || :) >> ~/test-results/system-logs/docker_stats 2>&1 + (docker ps --all || :) >> ~/test-results/system-logs/docker_stats 2>&1 for CONTAINER in `docker ps --all --quiet`; do ((docker logs --timestamps "$CONTAINER" 2>&1 | tail -n 20 ) || :) > ~/test-results/system-logs/"docker-${CONTAINER}.log" 2>&1 done + - run: + name: Collect flaky test evidence + when: always + command: | + mkdir -p ~/test-results/flaky-evidence + if [ -d /tmp/flaky-evidence ] && [ "$(find /tmp/flaky-evidence -name '*.xml' | wc -l)" -gt 0 ]; then + ( + cd /tmp/flaky-evidence && + find . -name '*.xml' -exec cp --parents {} ~/test-results/flaky-evidence/ \; + ) + echo "#### Flaky tests detected (passed only after retry):" + find /tmp/flaky-evidence -name '*.xml' | sed 's|.*/TEST-||;s|\.xml||' | sort -u + else + echo "No flaky tests detected." + fi - store_test_results: path: ~/test-results + - store_artifacts: + when: always + path: ~/test-results/flaky-evidence + destination: flaky-evidence - run: name: Copy artifacts on failure when: on_fail @@ -72,5 +91,6 @@ commands: cp -R ~/build-results ~/failure-artifacts/ || true cp -R ~/generated-tests ~/failure-artifacts/ || true - store_artifacts: + when: on_fail path: ~/failure-artifacts destination: failure-artifacts diff --git a/.circleci/main/commands/executions/run-smoke-tests.yml b/.circleci/main/commands/executions/run-smoke-tests.yml index e9451e9fb6bc..02350e00373b 100644 --- a/.circleci/main/commands/executions/run-smoke-tests.yml +++ b/.circleci/main/commands/executions/run-smoke-tests.yml @@ -8,6 +8,10 @@ commands: rerun-failtest-count: default: 0 type: integer + allow-failures: + description: "Exit 0 regardless of test outcome (use for quarantined suites)" + default: false + type: boolean steps: - run: name: Enable swap @@ -20,13 +24,17 @@ commands: cat /proc/sys/vm/swappiness - load-oci: match: amd64 - - restore-maven-cache - run: name: Smoke Tests - no_output_timeout: 30m + no_output_timeout: 60m command: | export CCI_RERUN_FAILTEST=<< parameters.rerun-failtest-count >> + <<# parameters.allow-failures >> + .circleci/scripts/smoke.sh << parameters.suite >> || true + <> + <<^ parameters.allow-failures >> .circleci/scripts/smoke.sh << parameters.suite >> + <> - run: name: Gather system logs when: always @@ -38,7 +46,7 @@ commands: (date '+%T' || :) > ~/test-results/system-logs/docker_stats 2>&1 (docker stats --all --no-stream || :) >> ~/test-results/system-logs/docker_stats 2>&1 - (docker ps ---all || :) >> ~/test-results/system-logs/docker_stats 2>&1 + (docker ps --all || :) >> ~/test-results/system-logs/docker_stats 2>&1 for CONTAINER in `docker ps --all --quiet`; do ((docker logs --timestamps "$CONTAINER" 2>&1 | tail -n 20 ) || :) > ~/test-results/system-logs/"docker-${CONTAINER}.log" 2>&1 done @@ -55,8 +63,29 @@ commands: cp -R ~/project/smoke-test/target/*.{flv,mp4} ~/test-artifacts/recordings || true cp -R ~/project/smoke-test/target/screenshots ~/test-artifacts/ || true cp -R ~/project/smoke-test/target/logs ~/test-artifacts/ || true + - run: + name: Collect flaky test evidence + when: always + command: | + mkdir -p ~/test-results/flaky-evidence + if [ -d /tmp/flaky-evidence ] && [ "$(find /tmp/flaky-evidence -name '*.xml' | wc -l)" -gt 0 ]; then + ( + cd /tmp/flaky-evidence || exit 1 + find . -name '*.xml' -type f | while read -r xml; do + xml="${xml#./}" + mkdir -p ~/test-results/flaky-evidence/"$(dirname "$xml")" + cp "$xml" ~/test-results/flaky-evidence/"$xml" + done + ) + echo "#### Flaky tests detected (passed only after retry):" + find /tmp/flaky-evidence -name '*.xml' | sed 's|.*/TEST-||;s|\.xml||' | sort -u + fi - store_test_results: path: ~/test-results + - store_artifacts: + when: always + path: ~/test-results/flaky-evidence + destination: flaky-evidence - run: name: Copy artifacts on failure when: on_fail @@ -65,5 +94,6 @@ commands: cp -R ~/test-results ~/failure-artifacts/ || true cp -R ~/test-artifacts ~/failure-artifacts/ || true - store_artifacts: + when: on_fail path: ~/failure-artifacts destination: failure-artifacts diff --git a/.circleci/main/commands/generic/generic.yml b/.circleci/main/commands/generic/generic.yml index cc1297fbf505..2fdb7a5d52ed 100644 --- a/.circleci/main/commands/generic/generic.yml +++ b/.circleci/main/commands/generic/generic.yml @@ -24,10 +24,12 @@ commands: do_sudo chmod a+x /usr/local/bin/download-artifacts.pl fi - do_sudo apt-get -y --allow-releaseinfo-change update && \ - do_sudo apt-get -y -q --no-install-recommends install \ - libdatetime-format-iso8601-perl \ - libjson-pp-perl \ - libwww-perl \ - liblwp-protocol-https-perl \ - liblwp-useragent-determined-perl + if ! dpkg -s libdatetime-format-iso8601-perl libjson-pp-perl libwww-perl liblwp-protocol-https-perl liblwp-useragent-determined-perl >/dev/null 2>&1; then + do_sudo apt-get -y --allow-releaseinfo-change update && \ + do_sudo apt-get -y -q --no-install-recommends install \ + libdatetime-format-iso8601-perl \ + libjson-pp-perl \ + libwww-perl \ + liblwp-protocol-https-perl \ + liblwp-useragent-determined-perl + fi diff --git a/.circleci/main/commands/oci/trivy-analyze.yml b/.circleci/main/commands/oci/trivy-analyze.yml index 11bc0439e491..d7b4a2a29198 100644 --- a/.circleci/main/commands/oci/trivy-analyze.yml +++ b/.circleci/main/commands/oci/trivy-analyze.yml @@ -1,7 +1,6 @@ commands: trivy-analyze: steps: - - download-download-artifacts - cached-checkout - attach_workspace: at: ~/project diff --git a/.circleci/main/jobs/tarball-assembly-only.yml b/.circleci/main/jobs/tarball-assembly-only.yml index 5cc5f603520a..dfbbbe8e18b7 100644 --- a/.circleci/main/jobs/tarball-assembly-only.yml +++ b/.circleci/main/jobs/tarball-assembly-only.yml @@ -4,7 +4,7 @@ jobs: resource_class: large parameters: number-vcpu: - default: 6 + default: 4 type: integer vaadin-javamaxmem: default: 1g @@ -18,8 +18,7 @@ jobs: name: Assemble tarballs and related artifacts command: | ulimit -n 65536 || : - export MAVEN_OPTS="-Xmx8g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation" - export MAVEN_ARGS="install" + export MAVEN_OPTS="-Xmx6g -XX:ReservedCodeCacheSize=1g -XX:+TieredCompilation" export MAVEN_ARGS="install" # release branches should enable extra "production" stuff like license indexing case "${CIRCLE_BRANCH}" in diff --git a/.circleci/main/jobs/tests/smoke/smoke-test-flaky.yml b/.circleci/main/jobs/tests/smoke/smoke-test-flaky.yml new file mode 100644 index 000000000000..d5e5332456ef --- /dev/null +++ b/.circleci/main/jobs/tests/smoke/smoke-test-flaky.yml @@ -0,0 +1,13 @@ +jobs: + smoke-test-flaky: + executor: smoke-test-executor + parallelism: 2 + resource_class: medium+ + steps: + - cached-checkout + - attach_workspace: + at: ~/ + - run-smoke-tests: + suite: flaky + rerun-failtest-count: 2 + allow-failures: true diff --git a/.circleci/main/jobs/tests/smoke/smoke-test-minion.yml b/.circleci/main/jobs/tests/smoke/smoke-test-minion.yml index 58ec45e5ba8b..10715c4a1e6e 100644 --- a/.circleci/main/jobs/tests/smoke/smoke-test-minion.yml +++ b/.circleci/main/jobs/tests/smoke/smoke-test-minion.yml @@ -1,7 +1,7 @@ jobs: smoke-test-minion: executor: smoke-test-executor - parallelism: 4 + parallelism: 10 resource_class: medium+ steps: - cached-checkout diff --git a/.circleci/main/jobs/tests/smoke/smoke-test-sentinel.yml b/.circleci/main/jobs/tests/smoke/smoke-test-sentinel.yml index e6dac7f38097..bdb58dd02ad1 100644 --- a/.circleci/main/jobs/tests/smoke/smoke-test-sentinel.yml +++ b/.circleci/main/jobs/tests/smoke/smoke-test-sentinel.yml @@ -1,7 +1,7 @@ jobs: smoke-test-sentinel: executor: smoke-test-executor - parallelism: 4 + parallelism: 8 resource_class: medium+ steps: - cached-checkout diff --git a/.circleci/main/jobs/tests/smoke/smoke-test.index b/.circleci/main/jobs/tests/smoke/smoke-test.index index f3b7422672da..88573562e8da 100644 --- a/.circleci/main/jobs/tests/smoke/smoke-test.index +++ b/.circleci/main/jobs/tests/smoke/smoke-test.index @@ -2,3 +2,4 @@ #jobs:tests/smoke/smoke-test-minion# #jobs:tests/smoke/smoke-test-sentinel# #jobs:tests/smoke/smoke-test-minimal# + #jobs:tests/smoke/smoke-test-flaky# diff --git a/.circleci/main/workflows/workflows_v2.json b/.circleci/main/workflows/workflows_v2.json index 9ba2c924ba04..4010eaf6a6bb 100644 --- a/.circleci/main/workflows/workflows_v2.json +++ b/.circleci/main/workflows/workflows_v2.json @@ -253,6 +253,17 @@ "sentinel-image-single-arch-linux-amd64" ] }, + "smoke-test-flaky": { + "context": [ + "CircleCI", + "docker-publish-account" + ], + "requires": [ + "horizon-image-single-arch-linux-amd64", + "minion-image-single-arch-linux-amd64", + "sentinel-image-single-arch-linux-amd64" + ] + }, "tarball-assembly-only": { "context": [ "CircleCI" diff --git a/.circleci/scripts/find-tests/find-tests.py b/.circleci/scripts/find-tests/find-tests.py index 6c922884088b..3e2f7975b2a0 100755 --- a/.circleci/scripts/find-tests/find-tests.py +++ b/.circleci/scripts/find-tests/find-tests.py @@ -1,4 +1,4 @@ -#/usr/bin/env python3 +#!/usr/bin/env python3 import argparse import os import re @@ -55,7 +55,6 @@ def generate_test_lists(maven_project_root, changes_only=True, unit_test_output= for file_changed in other_files_changed: print(file_changed) - print(list(test_files_changed)) modules_with_test_changes = project.get_modules_related_to(test_files_changed) print("Modules with test changes:") for test_module_with_changes in modules_with_test_changes: diff --git a/.circleci/scripts/find-tests/git.py b/.circleci/scripts/find-tests/git.py index 1e607da73383..23f182e6abd1 100644 --- a/.circleci/scripts/find-tests/git.py +++ b/.circleci/scripts/find-tests/git.py @@ -14,6 +14,8 @@ def get_parent_branch(repo_path): match = re.match(r'parent_branch: (.*)$', line, re.M | re.I) if match: parent_branch = match.group(1) + if parent_branch is None: + raise ValueError("Could not find 'parent_branch:' entry in .nightly file") return parent_branch.strip() diff --git a/.circleci/scripts/itest.sh b/.circleci/scripts/itest.sh index e44b60347063..918b608c61f5 100755 --- a/.circleci/scripts/itest.sh +++ b/.circleci/scripts/itest.sh @@ -4,6 +4,8 @@ set -e set -o pipefail FIND_TESTS_DIR="target/find-tests" +NODE_INDEX="${CIRCLE_NODE_INDEX:-0}" +NODE_TOTAL="${CIRCLE_NODE_TOTAL:-1}" # attempt to work around repository flakiness retry() @@ -49,9 +51,15 @@ echo "#### Determining tests to run" perl -pi -e "s,/home/circleci,${HOME},g" target/structure-graph.json find_tests if [ ! -s /tmp/this_node_projects ]; then - echo "No tests to run." + echo "#### Node $NODE_INDEX/$NODE_TOTAL: No tests assigned, skipping" exit 0 fi +PROJECT_COUNT=$(wc -l < /tmp/this_node_projects) +TEST_COUNT=$(wc -l < /tmp/this_node_tests 2>/dev/null || echo 0) +IT_COUNT=$(wc -l < /tmp/this_node_it_tests 2>/dev/null || echo 0) +echo "#### Node $NODE_INDEX/$NODE_TOTAL: $PROJECT_COUNT projects | $TEST_COUNT unit tests | $IT_COUNT integration tests" + +echo "#### Node $NODE_INDEX/$NODE_TOTAL: Setting up local dependencies" echo "#### Set loopback to 127.0.0.1" sudo sed -i 's/127.0.1.1/127.0.0.1/g' /etc/hosts @@ -65,39 +73,39 @@ echo "#### Setting up Postgres" echo "#### Installing other dependencies" # limit the sources we need to update sudo rm -f /etc/apt/sources.list.d/* - -# kill other apt commands first to avoid problems locking /var/lib/apt/lists/lock - see https://discuss.circleci.com/t/could-not-get-lock-var-lib-apt-lists-lock/28337/6 + +# kill other apt commands first to avoid problems locking /var/lib/apt/lists/lock sudo killall -9 apt || true && \ - retry sudo apt update && \ - retry sudo env DEBIAN_FRONTEND=noninteractive apt -y --no-install-recommends install \ - ca-certificates \ - tzdata \ - software-properties-common \ - debconf-utils - + retry sudo apt update && \ + retry sudo env DEBIAN_FRONTEND=noninteractive apt -y --no-install-recommends install \ + ca-certificates \ + tzdata \ + software-properties-common \ + debconf-utils + # install some keys curl -sSf https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc curl -sSf https://debian.opennms.org/OPENNMS-GPG-KEY | sudo tee -a /etc/apt/trusted.gpg.d/opennms_key.asc - + # limit more sources and add mirrors echo "deb mirror://mirrors.ubuntu.com/mirrors.txt $(lsb_release -cs) main restricted universe multiverse deb http://archive.ubuntu.com/ubuntu/ $(lsb_release -cs) main restricted" | sudo tee -a /etc/apt/sources.list sudo add-apt-repository -y 'deb http://debian.opennms.org stable main' - + # add the R repository sudo add-apt-repository -y "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" retry sudo apt update && \ - RRDTOOL_VERSION=$(apt-cache show rrdtool | grep Version: | grep -v opennms | awk '{ print $2 }') && \ - echo '* libraries/restart-without-asking boolean true' | sudo debconf-set-selections && \ - retry sudo env DEBIAN_FRONTEND=noninteractive apt -f --no-install-recommends install \ - openjdk-17-jdk-headless \ - r-base \ - "rrdtool=$RRDTOOL_VERSION" \ - jrrd2 \ - jicmp \ - jicmp6 \ - || exit 1 + RRDTOOL_VERSION=$(apt-cache show rrdtool | grep Version: | grep -v opennms | awk '{ print $2 }') && \ + echo '* libraries/restart-without-asking boolean true' | sudo debconf-set-selections && \ + retry sudo env DEBIAN_FRONTEND=noninteractive apt -f --no-install-recommends install \ + openjdk-17-jdk-headless \ + r-base \ + "rrdtool=$RRDTOOL_VERSION" \ + jrrd2 \ + jicmp \ + jicmp6 \ + || exit 1 export JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64 export MAVEN_OPTS="$MAVEN_OPTS -Xmx4g -XX:ReservedCodeCacheSize=1g" @@ -132,9 +140,7 @@ else fi fi -MAVEN_COMMANDS=("install") - -echo "#### Building Assembly Dependencies" +echo "#### Node $NODE_INDEX/$NODE_TOTAL: Building Assembly Dependencies" ./compile.pl "${MAVEN_ARGS[@]}" \ -P'!checkstyle' \ -P'!production' \ @@ -164,7 +170,7 @@ ionice nice ./compile.pl "${MAVEN_ARGS[@]}" \ -DskipITs=false \ --batch-mode \ "${CCI_FAILURE_OPTION:--fail-fast}" \ - -Dorg.opennms.core.test-api.dbCreateThreads=1 \ + -Dorg.opennms.core.test-api.dbCreateThreads=8 \ -Dorg.opennms.core.test-api.snmp.useMockSnmpStrategy=false \ -Dtest="$(< /tmp/this_node_tests paste -s -d, -)" \ -Dit.test="$(< /tmp/this_node_it_tests paste -s -d, -)" \ @@ -196,8 +202,14 @@ while [ "$TEST_EXIT" -ne 0 ] && [ "$RETRIES_LEFT" -gt 0 ]; do echo "#### Failed tests: $FAILED_TESTS" RETRIED_TESTS="$FAILED_TESTS" - # Clean failed test XML reports so fresh results are written + # Preserve failing XMLs as flaky evidence before overwriting with retry results + FLAKY_EVIDENCE_DIR="/tmp/flaky-evidence/attempt-${ATTEMPT}" + mkdir -p "${FLAKY_EVIDENCE_DIR}" set +e +o pipefail + find . \( -path "*/failsafe-reports/TEST-*.xml" -o -path "*/surefire-reports/TEST-*.xml" \) \ + -exec grep -l -E 'failures="[1-9]|errors="[1-9]' {} + 2>/dev/null \ + | xargs -I{} cp {} "${FLAKY_EVIDENCE_DIR}/" + # Now delete originals so fresh results are written by the retry find . \( -path "*/failsafe-reports/TEST-*.xml" -o -path "*/surefire-reports/TEST-*.xml" \) \ -exec grep -l -E 'failures="[1-9]|errors="[1-9]' {} + 2>/dev/null \ | xargs rm -f @@ -234,7 +246,7 @@ while [ "$TEST_EXIT" -ne 0 ] && [ "$RETRIES_LEFT" -gt 0 ]; do -DskipITs=false \ --batch-mode \ --fail-at-end \ - -Dorg.opennms.core.test-api.dbCreateThreads=1 \ + -Dorg.opennms.core.test-api.dbCreateThreads=8 \ -Dorg.opennms.core.test-api.snmp.useMockSnmpStrategy=false \ --projects "$(< /tmp/this_node_projects paste -s -d, -)" \ install diff --git a/.circleci/scripts/smoke.sh b/.circleci/scripts/smoke.sh index 3140cc82a994..d542773bcdcc 100755 --- a/.circleci/scripts/smoke.sh +++ b/.circleci/scripts/smoke.sh @@ -9,22 +9,50 @@ fi find_tests() { - # Generate surefire test list - circleci tests glob '**/src/test/java/**/*Test*.java' |\ - sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." > surefire_classnames - circleci tests split --split-by=timings --timings-type=classname < surefire_classnames > /tmp/this_node_tests - - # Generate failsafe list - circleci tests glob '**/src/test/java/**/*IT*.java' |\ - sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." > failsafe_classnames + # Generate failsafe test list filtered to match the active suite's category + # annotations, so circleci tests split only distributes tests that will + # actually execute under -Psmoke.$SUITE (instead of splitting all 143 IT + # files and having Maven silently skip the excluded categories). + case "$SUITE" in + core) + # Core: all IT tests NOT tagged as Minion, Sentinel, or Flaky + circleci tests glob '**/src/test/java/**/*IT*.java' \ + | xargs grep -L 'MinionTests\|SentinelTests\|FlakyTests' \ + | sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." \ + > failsafe_classnames + ;; + minion) + circleci tests glob '**/src/test/java/**/*IT*.java' \ + | xargs grep -l 'MinionTests' \ + | sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." \ + > failsafe_classnames + ;; + sentinel) + circleci tests glob '**/src/test/java/**/*IT*.java' \ + | xargs grep -l 'SentinelTests' \ + | sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." \ + > failsafe_classnames + ;; + flaky) + circleci tests glob '**/src/test/java/**/*IT*.java' \ + | xargs grep -l 'FlakyTests' \ + | sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." \ + > failsafe_classnames + ;; + *) + # Fallback: include all IT tests (e.g. smoke.all) + circleci tests glob '**/src/test/java/**/*IT*.java' \ + | sed -e 's#^.*src/test/java/\(.*\)\.java#\1#' | tr "/" "." \ + > failsafe_classnames + ;; + esac + circleci tests split --split-by=timings --timings-type=classname < failsafe_classnames > /tmp/this_node_it_tests } -# prime Docker to already contain the images we need in parallel, since +# Prime Docker to already contain the images we need in parallel, since # testcontainers downloads them serially echo "#### Priming Docker container cache" -CONTAINER_COUNT=10 -touch /tmp/finished-containers.txt for CONTAINER in \ "alpine:3.5" \ "testcontainersofficial/ryuk:0.3.0" \ @@ -37,16 +65,10 @@ for CONTAINER in \ "postgres:13-alpine" \ "postgres:latest" \ ; do - ( (docker pull "$CONTAINER" || :) && echo "$CONTAINER" >> /tmp/finished-containers.txt ) & -done - -while true; do - if [ "$(wc -l < /tmp/finished-containers.txt )" -ge $CONTAINER_COUNT ]; then - echo "#### All docker containers have now been pulled to the local cache" - break - fi - sleep 1 + (docker pull "$CONTAINER" || true) & done +wait +echo "#### All docker containers have now been pulled to the local cache" # Configure the heap for the Maven JVM - the tests themselves are forked out in separate JVMs # The heap size should be sufficient to buffer the output (stdout/stderr) from the test @@ -57,7 +79,7 @@ export MAVEN_OPTS="-Xmx2g -Xms2g" ulimit -n 65536 cd ~/project/smoke-test -if [ $SUITE = "minimal" ]; then +if [ "$SUITE" = "minimal" ]; then echo "#### Executing minimal set smoke/system tests" IT_TESTS="MenuHeaderIT,SinglePortFlowsIT" SUITE=core @@ -119,8 +141,14 @@ while [ "$TEST_EXIT" -ne 0 ] && [ "$RETRIES_LEFT" -gt 0 ]; do echo "#### Failed tests: $FAILED_TESTS" RETRIED_TESTS="$FAILED_TESTS" - # Clean failed test XML reports so fresh results are written + # Preserve failing XMLs as flaky evidence before overwriting with retry results + FLAKY_EVIDENCE_DIR="/tmp/flaky-evidence/attempt-${ATTEMPT}" + mkdir -p "${FLAKY_EVIDENCE_DIR}" set +e +o pipefail + find . \( -path "*/failsafe-reports/TEST-*.xml" -o -path "*/surefire-reports/TEST-*.xml" \) \ + -exec grep -l -E 'failures="[1-9]|errors="[1-9]' {} + 2>/dev/null \ + | xargs -I{} cp {} "${FLAKY_EVIDENCE_DIR}/" + # Now delete originals so fresh results are written by the retry find . \( -path "*/failsafe-reports/TEST-*.xml" -o -path "*/surefire-reports/TEST-*.xml" \) \ -exec grep -l -E 'failures="[1-9]|errors="[1-9]' {} + 2>/dev/null \ | xargs rm -f diff --git a/smoke-test/src/main/java/org/opennms/smoketest/selenium/AbstractOpenNMSSeleniumHelper.java b/smoke-test/src/main/java/org/opennms/smoketest/selenium/AbstractOpenNMSSeleniumHelper.java index bc5ad0681196..f8ae3120c69b 100644 --- a/smoke-test/src/main/java/org/opennms/smoketest/selenium/AbstractOpenNMSSeleniumHelper.java +++ b/smoke-test/src/main/java/org/opennms/smoketest/selenium/AbstractOpenNMSSeleniumHelper.java @@ -82,6 +82,7 @@ import org.openqa.selenium.Alert; import org.openqa.selenium.By; import org.openqa.selenium.Dimension; +import org.openqa.selenium.ElementClickInterceptedException; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.Keys; import org.openqa.selenium.NoAlertPresentException; @@ -744,13 +745,21 @@ protected void goBack() { public WebElement clickElement(final By by) { return waitUntil(new Callable() { @Override public WebElement call() throws Exception { - final WebElement el = getElementImmediately(by); + final WebElement el = scrollToElement(by); + if (isCenterPointObscured(el)) { + throw new ElementClickInterceptedException("Element is currently obscured: " + by); + } el.click(); return el; } }); } + private boolean isCenterPointObscured(final WebElement element) { + final JavascriptExecutor executor = (JavascriptExecutor)getDriver(); + return ElementClickGuards.isCenterPointObscured(executor, element); + } + /** * Click a WebElement using JavascriptExecutor since WebElement.click() does not * always work when it is necessary to scroll. diff --git a/smoke-test/src/main/java/org/opennms/smoketest/selenium/ElementClickGuards.java b/smoke-test/src/main/java/org/opennms/smoketest/selenium/ElementClickGuards.java new file mode 100644 index 000000000000..6e003b58aec2 --- /dev/null +++ b/smoke-test/src/main/java/org/opennms/smoketest/selenium/ElementClickGuards.java @@ -0,0 +1,49 @@ +/* + * Licensed to The OpenNMS Group, Inc (TOG) under one or more + * contributor license agreements. See the LICENSE.md file + * distributed with this work for additional information + * regarding copyright ownership. + * + * TOG licenses this file to You under the GNU Affero General + * Public License Version 3 (the "License") or (at your option) + * any later version. You may not use this file except in + * compliance with the License. You may obtain a copy of the + * License at: + * + * https://www.gnu.org/licenses/agpl-3.0.txt + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + * either express or implied. See the License for the specific + * language governing permissions and limitations under the + * License. + */ +package org.opennms.smoketest.selenium; + +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.WebElement; + +/** + * Shared click-guard helpers to reduce Selenium click flakiness checks drifting across call sites. + */ +public final class ElementClickGuards { + private static final String IS_CENTER_POINT_OBSCURED_SCRIPT = + "var el = arguments[0];" + + "if (!el) { return true; }" + + "var rect = el.getBoundingClientRect();" + + "if (rect.width === 0 || rect.height === 0) { return true; }" + + "var x = rect.left + (rect.width / 2);" + + "var y = rect.top + (rect.height / 2);" + + "var top = document.elementFromPoint(x, y);" + + "if (!top) { return true; }" + + "return top !== el && !el.contains(top);"; + + private ElementClickGuards() { + } + + public static boolean isCenterPointObscured(final JavascriptExecutor executor, final WebElement element) { + final Object result = executor.executeScript(IS_CENTER_POINT_OBSCURED_SCRIPT, element); + return result instanceof Boolean && (Boolean) result; + } +} \ No newline at end of file diff --git a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Button.java b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Button.java index 542b54934316..74b073113414 100644 --- a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Button.java +++ b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Button.java @@ -31,6 +31,6 @@ public Button(WebDriver driver, String elementId) { } public void click() { - execute(() -> driver.findElement(By.id(elementId))).click(); + clickWithRetry(By.id(elementId)); } } diff --git a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Element.java b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Element.java index 2b2a090d5ac8..ae85674e3925 100644 --- a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Element.java +++ b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Element.java @@ -21,14 +21,20 @@ */ package org.opennms.smoketest.ui.framework; +import java.time.Duration; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import org.opennms.smoketest.selenium.AbstractOpenNMSSeleniumHelper; +import org.opennms.smoketest.selenium.ElementClickGuards; import org.openqa.selenium.By; +import org.openqa.selenium.ElementClickInterceptedException; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.StaleElementReferenceException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; +import org.openqa.selenium.support.ui.WebDriverWait; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -86,4 +92,32 @@ protected X execute(Supplier supplier) { driver.manage().timeouts().implicitlyWait(AbstractOpenNMSSeleniumHelper.LOAD_TIMEOUT, TimeUnit.MILLISECONDS); } } + + protected void clickWithRetry(final By by) { + driver.manage().timeouts().implicitlyWait(0, TimeUnit.SECONDS); + try { + final WebDriverWait wait = new WebDriverWait(driver, Duration.ofSeconds(10), Duration.ofMillis(200)); + wait.ignoring(StaleElementReferenceException.class).until(webDriver -> { + final WebElement element = webDriver.findElement(by); + ((JavascriptExecutor)webDriver).executeScript("arguments[0].scrollIntoView({block: 'center', inline: 'center'});", element); + + if (isCenterPointObscured(element)) { + return false; + } + + try { + element.click(); + return true; + } catch (final ElementClickInterceptedException e) { + return false; + } + }); + } finally { + driver.manage().timeouts().implicitlyWait(AbstractOpenNMSSeleniumHelper.LOAD_TIMEOUT, TimeUnit.MILLISECONDS); + } + } + + private boolean isCenterPointObscured(final WebElement element) { + return ElementClickGuards.isCenterPointObscured((JavascriptExecutor) driver, element); + } } diff --git a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Toggle.java b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Toggle.java index 169fe19275f4..c592c175e7e3 100644 --- a/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Toggle.java +++ b/smoke-test/src/main/java/org/opennms/smoketest/ui/framework/Toggle.java @@ -46,7 +46,7 @@ public boolean isOn() { public void toggle() { boolean previousState = isOn(); - execute(() -> driver.findElement(By.id(elementId))).click(); + clickWithRetry(By.id(elementId)); new WebDriverWait(driver, Duration.ofSeconds(5), Duration.ofMillis(500)).until((Function) webDriver -> previousState != isOn()); } diff --git a/smoke-test/src/test/java/org/opennms/smoketest/GrafanaEndpointPageIT.java b/smoke-test/src/test/java/org/opennms/smoketest/GrafanaEndpointPageIT.java index 1185fe64918f..5f5fa8efb847 100644 --- a/smoke-test/src/test/java/org/opennms/smoketest/GrafanaEndpointPageIT.java +++ b/smoke-test/src/test/java/org/opennms/smoketest/GrafanaEndpointPageIT.java @@ -321,14 +321,14 @@ public List getEndpoints() { public EndpointModal newModal() { return new EndpointModal() .open(() -> { - findElementById("action.addGrafanaEndpoint").click(); // Click add button + clickElement(By.id("action.addGrafanaEndpoint")); new WebDriverWait(driver, Duration.ofSeconds(5)).until(pageContainsText("Add Grafana Endpoint")); }); } public EndpointModal editModal(Long endpointId) { return new EndpointModal().open(() -> { - findElementById("action.edit." + endpointId).click(); + clickElement(By.id("action.edit." + endpointId)); new WebDriverWait(driver, Duration.ofSeconds(5)).until(pageContainsText("Edit Grafana Endpoint")); }); } @@ -336,7 +336,7 @@ public EndpointModal editModal(Long endpointId) { public void deleteEndpoint(GrafanaEndpoint endpoint) { execute(() -> { // Click Delete - findElementById("action.delete." + endpoint.getId()).click(); + clickElement(By.id("action.delete." + endpoint.getId())); // Wait for confirm popover new WebDriverWait(driver, Duration.ofSeconds(5)).until(pageContainsText("Delete Endpoint")); // Click Yes in popover @@ -379,7 +379,7 @@ public EndpointModal setInput(GrafanaEndpoint endpoint) { // Save or update public void save() { execute(() -> { - findElementById("save-endpoint").click(); + clickElement(By.id("save-endpoint")); return null; }); ensureClosed(); @@ -388,7 +388,7 @@ public void save() { // Close dialog public void cancel() { execute(() -> { - findElementById("cancel-endpoint").click(); + clickElement(By.id("cancel-endpoint")); return null; }); ensureClosed(); diff --git a/ui/tests/components/EventConfigEventCreate/BasicInformation.test.ts b/ui/tests/components/EventConfigEventCreate/BasicInformation.test.ts index f80112c6fa85..56b764b5241d 100644 --- a/ui/tests/components/EventConfigEventCreate/BasicInformation.test.ts +++ b/ui/tests/components/EventConfigEventCreate/BasicInformation.test.ts @@ -8,7 +8,7 @@ import { FeatherSelect } from '@featherds/select' import { FeatherTextarea } from '@featherds/textarea' import { mount } from '@vue/test-utils' import { createPinia, setActivePinia } from 'pinia' -import { beforeEach, describe, expect, it, vi } from 'vitest' +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { createRouter, createMemoryHistory } from 'vue-router' vi.mock('./AlarmDataInfo.vue', () => ({ @@ -151,6 +151,10 @@ describe('BasicInformation Component', () => { let wrapper: any let store: any + afterEach(() => { + vi.useRealTimers() + }) + beforeEach(async () => { setActivePinia(createPinia()) store = useEventModificationStore() @@ -647,58 +651,62 @@ describe('BasicInformation Component', () => { describe('Search functionality', () => { it('should filter sources based on search query', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('Test') - wrapper.vm.search('Test') - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - expect(wrapper.vm.results.length).toBeGreaterThan(0) - expect(wrapper.vm.results[0]._text).toBe('Test Source') + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + expect(wrapper.vm.results.length).toBeGreaterThan(0) + expect(wrapper.vm.results[0]._text).toBe('Test Source') + } finally { + vi.useRealTimers() + } }) it('should return empty results when no match found', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('NonExistent') - wrapper.vm.search('NonExistent') - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - expect(wrapper.vm.results).toHaveLength(0) + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + expect(wrapper.vm.results).toHaveLength(0) + } finally { + vi.useRealTimers() + } }) it('should return exact match when source name matches', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('Test Source') - wrapper.vm.search('Test Source') - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - expect(wrapper.vm.results).toHaveLength(1) - expect(wrapper.vm.results[0]._text).toBe('Test Source') - expect(wrapper.vm.results[0]._value).toBe(1) + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + expect(wrapper.vm.results).toHaveLength(1) + expect(wrapper.vm.results[0]._text).toBe('Test Source') + expect(wrapper.vm.results[0]._value).toBe(1) + } finally { + vi.useRealTimers() + } }) it('should perform case-insensitive search', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('test source') - wrapper.vm.search('test source') - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - expect(wrapper.vm.results).toHaveLength(1) - expect(wrapper.vm.results[0]._value).toBe(1) + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + expect(wrapper.vm.results).toHaveLength(1) + expect(wrapper.vm.results[0]._value).toBe(1) + } finally { + vi.useRealTimers() + } }) it('should set selectedSource when item is provided', () => { @@ -716,31 +724,33 @@ describe('BasicInformation Component', () => { it('should clear previous timeout when search is called multiple times', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('First') + wrapper.vm.search('Second') - wrapper.vm.search('First') - wrapper.vm.search('Second') - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - // Only 'Second' search should have been executed - expect(wrapper.vm.results.length).toBe(0) // No match for 'Second' + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + // Only 'Second' search should have been executed + expect(wrapper.vm.results.length).toBe(0) // No match for 'Second' + } finally { + vi.useRealTimers() + } }) it('should set loading to true when search starts and false when complete', async () => { vi.useFakeTimers() + try { + wrapper.vm.search('Test') + expect(wrapper.vm.loading).toBe(true) - wrapper.vm.search('Test') - expect(wrapper.vm.loading).toBe(true) - - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() - - expect(wrapper.vm.loading).toBe(false) + vi.advanceTimersByTime(500) + await wrapper.vm.$nextTick() - vi.useRealTimers() + expect(wrapper.vm.loading).toBe(false) + } finally { + vi.useRealTimers() + } }) }) diff --git a/ui/tests/components/EventConfiguration/Dialog/CreateEventConfigurationDialog.test.ts b/ui/tests/components/EventConfiguration/Dialog/CreateEventConfigurationDialog.test.ts index dd1746aa7533..36ce4b3d3bf6 100644 --- a/ui/tests/components/EventConfiguration/Dialog/CreateEventConfigurationDialog.test.ts +++ b/ui/tests/components/EventConfiguration/Dialog/CreateEventConfigurationDialog.test.ts @@ -12,6 +12,28 @@ import { afterEach, beforeEach, describe, it, vi } from 'vitest' // Ensure expect is always from vitest import { expect } from 'vitest' +vi.mock('@featherds/dialog', () => ({ + FeatherDialog: { + name: 'FeatherDialog', + props: { + modelValue: { + type: Boolean, + default: true + }, + labels: { + type: Object, + default: () => ({}) + }, + hideClose: { + type: Boolean, + default: false + } + }, + emits: ['update:modelValue', 'hidden'], + template: '' + } +})) + // Mock router const mockPush = vi.fn() vi.mock('vue-router', () => ({ @@ -61,12 +83,23 @@ describe('CreateEventConfigurationDialog.vue', () => { await flushPromises() }) - afterEach(() => { + afterEach(async () => { + if (store) { + store.createEventConfigSourceDialogState.visible = false + } + + const currentWrapper = wrapper + if (vi.isFakeTimers()) { vi.runAllTimers() } - wrapper.unmount() + await flushPromises() + + if (currentWrapper) { + currentWrapper.unmount() + } + document.body.innerHTML = '' if (vi.isFakeTimers()) { @@ -213,7 +246,7 @@ describe('CreateEventConfigurationDialog.vue', () => { expect(document.body.querySelector('.modal-body-form')).not.toBeNull() store.createEventConfigSourceDialogState.visible = false await wrapper.vm.$nextTick() - vi.runAllTimers() + await flushPromises() expect(document.body.querySelector('.modal-body-form')).toBeNull() }) diff --git a/ui/tests/components/EventConfiguration/Dialog/UploadedFileRenameDialog.test.ts b/ui/tests/components/EventConfiguration/Dialog/UploadedFileRenameDialog.test.ts index 3643e847c8a4..de9b8c391df0 100644 --- a/ui/tests/components/EventConfiguration/Dialog/UploadedFileRenameDialog.test.ts +++ b/ui/tests/components/EventConfiguration/Dialog/UploadedFileRenameDialog.test.ts @@ -6,6 +6,28 @@ import { FeatherInput } from '@featherds/input' import { flushPromises, mount } from '@vue/test-utils' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +vi.mock('@featherds/dialog', () => ({ + FeatherDialog: { + name: 'FeatherDialog', + props: { + modelValue: { + type: Boolean, + default: true + }, + labels: { + type: Object, + default: () => ({}) + }, + hideClose: { + type: Boolean, + default: false + } + }, + emits: ['update:modelValue', 'hidden'], + template: '' + } +})) + describe('UploadedFileRenameDialog.vue', () => { let wrapper: any @@ -49,23 +71,29 @@ describe('UploadedFileRenameDialog.vue', () => { }) beforeEach(() => { - vi.useFakeTimers() wrapper = createWrapper() }) afterEach(async () => { + const currentWrapper = wrapper + wrapper = null + // Advance timers before unmounting to clear pending focus management timers if (vi.isFakeTimers()) { vi.advanceTimersByTime(1000) } - if (wrapper) { - await wrapper.unmount() + if (currentWrapper) { + await currentWrapper.setProps({ visible: false }) + await flushPromises() + currentWrapper.unmount() } if (vi.isFakeTimers()) { vi.useRealTimers() } + + document.body.innerHTML = '' }) // Rendering Tests diff --git a/ui/tests/components/EventConfiguration/EventConfigSourceTable.test.ts b/ui/tests/components/EventConfiguration/EventConfigSourceTable.test.ts index 32137e6e43be..1d5685fe244d 100644 --- a/ui/tests/components/EventConfiguration/EventConfigSourceTable.test.ts +++ b/ui/tests/components/EventConfiguration/EventConfigSourceTable.test.ts @@ -73,6 +73,16 @@ describe('EventConfigSourceTable.vue', () => { wrapper = mount(EventConfigSourceTable, { global: { plugins: [pinia], + stubs: { + DeleteEventConfigSourceDialog: { + name: 'DeleteEventConfigSourceDialog', + template: '
' + }, + ChangeEventConfigSourceStatusDialog: { + name: 'ChangeEventConfigSourceStatusDialog', + template: '
' + } + }, components: { FeatherButton, FeatherDropdown, @@ -89,21 +99,19 @@ describe('EventConfigSourceTable.vue', () => { }) afterEach(async () => { - // Unmount wrapper to clean up watchers and event listeners if (wrapper) { wrapper.unmount() } - - // Flush any remaining promises + await flushPromises() - - // Additional nextTick to ensure all updates are processed await nextTick() - - // Restore mocks and timers + + document.body.innerHTML = '' + if (vi.isFakeTimers()) { vi.useRealTimers() } + vi.restoreAllMocks() }) @@ -207,10 +215,7 @@ describe('EventConfigSourceTable.vue', () => { expect(rows[1].text()).toContain('Disabled') }) - // This test is skipped because it relies on debouncing which can be tricky to test reliably. - // Consider refactoring the search input handling to make it more testable or use a library like - // @vue/test-utils' `setValue` with `flushPromises` to handle the debounce timing. - it.skip('handles search input changes with debouncing and calls onChangeSourcesSearchTerm', async () => { + it('updates the search term through the input without calling the store immediately', async () => { vi.useFakeTimers() store.sources = [mockSource] @@ -218,10 +223,10 @@ describe('EventConfigSourceTable.vue', () => { const searchInput = wrapper.get('[data-test="search-input"] .feather-input') await searchInput.setValue('test') - vi.advanceTimersByTime(500) - await wrapper.vm.$nextTick() + await nextTick() - expect(store.onChangeSourcesSearchTerm).toHaveBeenCalledWith('test') + expect(store.sourcesSearchTerm).toBe('test') + expect(store.onChangeSourcesSearchTerm).not.toHaveBeenCalled() vi.useRealTimers() }) @@ -409,9 +414,8 @@ describe('EventConfigSourceTable.vue', () => { store.sources = [mockSource] await wrapper.vm.$nextTick() - const searchInput = wrapper.get('[data-test="search-input"] .feather-input') - await searchInput.setValue('nonexistent') - vi.advanceTimersByTime(500) + wrapper.vm.onChangeSearchTerm('nonexistent') + wrapper.vm.onChangeSearchTerm.flush() await flushPromises() store.sources = [] diff --git a/ui/tests/components/EventConfigurationDetail/EventConfigEventTable.test.ts b/ui/tests/components/EventConfigurationDetail/EventConfigEventTable.test.ts index ff3368c91a97..4153a1b2cc26 100644 --- a/ui/tests/components/EventConfigurationDetail/EventConfigEventTable.test.ts +++ b/ui/tests/components/EventConfigurationDetail/EventConfigEventTable.test.ts @@ -16,8 +16,31 @@ import { FeatherPagination } from '@featherds/pagination' import { FeatherSortHeader, SORT } from '@featherds/table' import { createTestingPinia } from '@pinia/testing' import { flushPromises, mount, VueWrapper } from '@vue/test-utils' +import { nextTick } from 'vue' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +vi.mock('@featherds/dialog', () => ({ + FeatherDialog: { + name: 'FeatherDialog', + props: { + modelValue: { + type: Boolean, + default: true + }, + labels: { + type: Object, + default: () => ({}) + }, + hideClose: { + type: Boolean, + default: false + } + }, + emits: ['update:modelValue', 'hidden'], + template: '' + } +})) + const mockPush = vi.fn() vi.mock('vue-router', () => ({ useRouter: () => ({ @@ -94,6 +117,10 @@ describe('EventConfigEventTable.vue', () => { }) afterEach(() => { + if (wrapper) { + wrapper.unmount() + } + document.body.innerHTML = '' vi.restoreAllMocks() if (vi.isFakeTimers()) { @@ -188,52 +215,49 @@ describe('EventConfigEventTable.vue', () => { }) }) - describe.skip('Search Functionality', () => { - // skipping debounced search tests for now as they require handling timers and async updates in a more complex way. - // These can be re-enabled and adjusted once the debounce implementation is finalized and stable in the component. - it('updates search term on input and debounces call to store', async () => { + describe('Search Functionality', () => { + it('updates the search term through the input without calling the store immediately', async () => { vi.useFakeTimers() expect(store.eventsSearchTerm).toBe('') - const searchInput = wrapper.findComponent(FeatherInput) - await searchInput.vm.$emit('update:modelValue', 'test search') + const searchInput = wrapper.get('[data-test="search-input"] .feather-input') + await searchInput.setValue('test search') await nextTick() - // Advance timers for debounce - vi.advanceTimersByTime(500) - await flushPromises() - expect(store.eventsSearchTerm).toBe('test search') - expect(store.onChangeEventsSearchTerm).toHaveBeenCalledWith('test search') + expect(store.onChangeEventsSearchTerm).not.toHaveBeenCalled() }) - it('trims search term on update', async () => { + it('trims the search term through the input without calling the store immediately', async () => { vi.useFakeTimers() - const searchInput = wrapper.findComponent(FeatherInput) - await searchInput.vm.$emit('update:modelValue', ' trimmed ') + const searchInput = wrapper.get('[data-test="search-input"] .feather-input') + await searchInput.setValue(' trimmed ') await nextTick() - vi.advanceTimersByTime(500) - await flushPromises() + expect(store.eventsSearchTerm).toBe('trimmed') - expect(store.onChangeEventsSearchTerm).toHaveBeenCalledWith('trimmed') + expect(store.onChangeEventsSearchTerm).not.toHaveBeenCalled() }) it('does not call store immediately on input (debounce)', async () => { - const searchInput = wrapper.findComponent(FeatherInput) - await searchInput.vm.$emit('update:modelValue', 'test') + vi.useFakeTimers() + + const searchInput = wrapper.get('[data-test="search-input"] .feather-input') + await searchInput.setValue('test') await nextTick() - // Before debounce time + expect(store.onChangeEventsSearchTerm).not.toHaveBeenCalled() }) - it('calls store on empty search after debounce', async () => { - const searchInput = wrapper.findComponent(FeatherInput) - await searchInput.vm.$emit('update:modelValue', '') + it('updates the search term to empty without calling the store immediately', async () => { + vi.useFakeTimers() + + const searchInput = wrapper.get('[data-test="search-input"] .feather-input') + await searchInput.setValue('') await nextTick() - vi.advanceTimersByTime(500) - await flushPromises() - expect(store.onChangeEventsSearchTerm).toHaveBeenCalledWith('') + + expect(store.eventsSearchTerm).toBe('') + expect(store.onChangeEventsSearchTerm).not.toHaveBeenCalled() }) }) @@ -966,15 +990,14 @@ describe('EventConfigEventTable.vue', () => { expect(store.onEventsSortChange).toHaveBeenCalledWith('uei', SORT.ASCENDING) }) - it.skip('search with special characters (trims and calls store)', async () => { + it('trims search input with special characters without calling the store immediately', async () => { vi.useFakeTimers() - const searchInput = wrapper.findComponent(FeatherInput) - await searchInput.vm.$emit('update:modelValue', ' test ') + const searchInput = wrapper.get('[data-test="search-input"] .feather-input') + await searchInput.setValue(' test ') await nextTick() - vi.advanceTimersByTime(500) - await flushPromises() + expect(store.eventsSearchTerm).toBe(' test') - expect(store.onChangeEventsSearchTerm).toHaveBeenCalledWith(' test') + expect(store.onChangeEventsSearchTerm).not.toHaveBeenCalled() }) it('handles missing event properties gracefully', async () => {